tm.texi (BLOCK_REG_PADDING): Describe.
[gcc.git] / gcc / config / rs6000 / rs6000.c
1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "obstack.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "except.h"
41 #include "function.h"
42 #include "output.h"
43 #include "basic-block.h"
44 #include "integrate.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "hashtab.h"
48 #include "tm_p.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
52 #include "reload.h"
53 #include "cfglayout.h"
54 #if TARGET_XCOFF
55 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
56 #endif
57
58 #ifndef TARGET_NO_PROTOTYPE
59 #define TARGET_NO_PROTOTYPE 0
60 #endif
61
/* True if N is in the signed 5-bit splat-immediate range [-16, 15] and
   all elements of vector X (mode Y) are identical per easy_vector_same.
   NOTE(review): easy_vector_same is declared later in this file; its
   exact semantics are assumed from the name -- confirm.  */
62 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
63 && easy_vector_same (x, y))
64
/* Like EASY_VECTOR_15, but for even N in [0x10, 0x1e] -- presumably
   such a splat is synthesized as N/2 followed by adding the result to
   itself (hence "ADD_SELF"); verify against the insn expander.  */
65 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
66 && !((n) & 1) \
67 && easy_vector_same (x, y))
68
/* Classic min/max macros; beware that both arguments are evaluated
   twice, so avoid side effects in A and B.  */
69 #define min(A,B) ((A) < (B) ? (A) : (B))
70 #define max(A,B) ((A) > (B) ? (A) : (B))
72 /* Target cpu type */
73
74 enum processor_type rs6000_cpu;
/* Cpu-selection sources, lowest priority first; each entry records the
   option text (filled in later), the switch name, and whether the
   switch sets the tuning model and/or the architecture flags.  Scanned
   in order by rs6000_override_options, so later entries win.  */
75 struct rs6000_cpu_select rs6000_select[3] =
76 {
77 /* switch name, tune arch */
78 { (const char *)0, "--with-cpu=", 1, 1 },
79 { (const char *)0, "-mcpu=", 1, 1 },
80 { (const char *)0, "-mtune=", 1, 0 },
81 };
82
83 /* Size of long double */
84 const char *rs6000_long_double_size_string;
85 int rs6000_long_double_type_size;
86
87 /* Whether -mabi=altivec has appeared */
88 int rs6000_altivec_abi;
89
90 /* Whether VRSAVE instructions should be generated. */
91 int rs6000_altivec_vrsave;
92
93 /* String from -mvrsave= option. */
94 const char *rs6000_altivec_vrsave_string;
95
96 /* Nonzero if we want SPE ABI extensions. */
97 int rs6000_spe_abi;
98
99 /* Whether isel instructions should be generated. */
100 int rs6000_isel;
101
102 /* Whether SPE simd instructions should be generated. */
103 int rs6000_spe;
104
105 /* Nonzero if floating point operations are done in the GPRs. */
106 int rs6000_float_gprs = 0;
107
108 /* String from -mfloat-gprs=. */
109 const char *rs6000_float_gprs_string;
110
111 /* String from -misel=. */
112 const char *rs6000_isel_string;
113
114 /* String from -mspe=. */
115 const char *rs6000_spe_string;
116
117 /* Set to nonzero once AIX common-mode calls have been defined. */
118 static GTY(()) int common_mode_defined;
119
120 /* Save information from a "cmpxx" operation until the branch or scc is
121 emitted. */
122 rtx rs6000_compare_op0, rs6000_compare_op1;
/* Nonzero if the saved comparison is floating point.  */
123 int rs6000_compare_fp_p;
124
125 /* Label number of label created for -mrelocatable, to call to so we can
126 get the address of the GOT section */
127 int rs6000_pic_labelno;
128
129 #ifdef USING_ELFOS_H
130 /* Which abi to adhere to */
131 const char *rs6000_abi_name;
132
133 /* Semantics of the small data area */
134 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
135
136 /* Which small data model to use */
137 const char *rs6000_sdata_name = (char *)0;
138
139 /* Counter for labels which are to be placed in .fixup. */
140 int fixuplabelno = 0;
141 #endif
142
143 /* Bit size of immediate TLS offsets and string from which it is decoded. */
144 int rs6000_tls_size = 32;
145 const char *rs6000_tls_size_string;
146
147 /* ABI enumeration available for subtarget to use. */
148 enum rs6000_abi rs6000_current_abi;
149
150 /* ABI string from -mabi= option. */
151 const char *rs6000_abi_string;
152
153 /* Debug flags */
154 const char *rs6000_debug_name;
155 int rs6000_debug_stack; /* debug stack applications */
156 int rs6000_debug_arg; /* debug argument handling */
157
158 /* Opaque types. */
159 static GTY(()) tree opaque_V2SI_type_node;
160 static GTY(()) tree opaque_V2SF_type_node;
161 static GTY(()) tree opaque_p_V2SI_type_node;
162
/* String from -mtraceback=, parsed by rs6000_override_options into
   the rs6000_traceback enum below ("full", "part...", or "no...").  */
163 const char *rs6000_traceback_name;
164 static enum {
165 traceback_default = 0,
166 traceback_none,
167 traceback_part,
168 traceback_full
169 } rs6000_traceback;
170
171 /* Flag to say the TOC is initialized */
172 int toc_initialized;
/* Buffer for the TOC label, filled by ASM_GENERATE_INTERNAL_LABEL
   ("LCTOC", 1) in rs6000_override_options when TARGET_TOC.  */
173 char toc_label_name[10];
174
175 /* Alias set for saves and restores from the rs6000 stack. */
176 static int rs6000_sr_alias_set;
177
178 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
179 The only place that looks at this is rs6000_set_default_type_attributes;
180 everywhere else should rely on the presence or absence of a longcall
181 attribute on the function declaration. */
182 int rs6000_default_long_calls;
183 const char *rs6000_longcall_switch;
184
185 /* Control alignment for fields within structures. */
186 /* String from -malign-XXXXX. */
187 const char *rs6000_alignment_string;
188 int rs6000_alignment_flags;
189
/* Description of a single builtin function: which target_flags bits
   enable it, the insn pattern implementing it, its user-visible name,
   and its rs6000_builtins function code.  */
190 struct builtin_description
191 {
192 /* mask is not const because we're going to alter it below. This
193 nonsense will go away when we rewrite the -march infrastructure
194 to give us more target flag bits. */
195 unsigned int mask;
196 const enum insn_code icode;
197 const char *const name;
198 const enum rs6000_builtins code;
199 };
200
201 static bool rs6000_function_ok_for_sibcall PARAMS ((tree, tree));
202 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
203 static void validate_condition_mode
204 PARAMS ((enum rtx_code, enum machine_mode));
205 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
206 static void rs6000_maybe_dead PARAMS ((rtx));
207 static void rs6000_emit_stack_tie PARAMS ((void));
208 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
209 static rtx spe_synthesize_frame_save PARAMS ((rtx));
210 static bool spe_func_has_64bit_regs_p PARAMS ((void));
211 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
212 unsigned int, int, int));
213 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
214 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
215 static unsigned rs6000_hash_constant PARAMS ((rtx));
216 static unsigned toc_hash_function PARAMS ((const void *));
217 static int toc_hash_eq PARAMS ((const void *, const void *));
218 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
219 static bool constant_pool_expr_p PARAMS ((rtx));
220 static bool toc_relative_expr_p PARAMS ((rtx));
221 static bool legitimate_small_data_p PARAMS ((enum machine_mode, rtx));
222 static bool legitimate_offset_address_p PARAMS ((enum machine_mode, rtx, int));
223 static bool legitimate_indexed_address_p PARAMS ((rtx, int));
224 static bool legitimate_indirect_address_p PARAMS ((rtx, int));
225 static bool legitimate_lo_sum_address_p PARAMS ((enum machine_mode, rtx, int));
226 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
227 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
228 #ifdef HAVE_GAS_HIDDEN
229 static void rs6000_assemble_visibility PARAMS ((tree, int));
230 #endif
231 static int rs6000_ra_ever_killed PARAMS ((void));
232 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
233 extern const struct attribute_spec rs6000_attribute_table[];
234 static void rs6000_set_default_type_attributes PARAMS ((tree));
235 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
236 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
237 static void rs6000_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
238 HOST_WIDE_INT, tree));
239 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
240 HOST_WIDE_INT, HOST_WIDE_INT));
241 static void rs6000_file_start PARAMS ((void));
242 #if TARGET_ELF
243 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
244 int));
245 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
246 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
247 static void rs6000_elf_select_section PARAMS ((tree, int,
248 unsigned HOST_WIDE_INT));
249 static void rs6000_elf_unique_section PARAMS ((tree, int));
250 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
251 unsigned HOST_WIDE_INT));
252 static void rs6000_elf_encode_section_info PARAMS ((tree, rtx, int))
253 ATTRIBUTE_UNUSED;
254 static bool rs6000_elf_in_small_data_p PARAMS ((tree));
255 #endif
256 #if TARGET_XCOFF
257 static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
258 static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
259 static void rs6000_xcoff_select_section PARAMS ((tree, int,
260 unsigned HOST_WIDE_INT));
261 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
262 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
263 unsigned HOST_WIDE_INT));
264 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
265 static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
266 static void rs6000_xcoff_file_start PARAMS ((void));
267 static void rs6000_xcoff_file_end PARAMS ((void));
268 #endif
269 #if TARGET_MACHO
270 static bool rs6000_binds_local_p PARAMS ((tree));
271 #endif
272 static int rs6000_use_dfa_pipeline_interface PARAMS ((void));
273 static int rs6000_variable_issue PARAMS ((FILE *, int, rtx, int));
274 static bool rs6000_rtx_costs PARAMS ((rtx, int, int, int *));
275 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
276 static int rs6000_adjust_priority PARAMS ((rtx, int));
277 static int rs6000_issue_rate PARAMS ((void));
278 static int rs6000_use_sched_lookahead PARAMS ((void));
279
280 static void rs6000_init_builtins PARAMS ((void));
281 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
282 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
283 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
284 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
285 static void altivec_init_builtins PARAMS ((void));
286 static void rs6000_common_init_builtins PARAMS ((void));
287
288 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
289 int, enum rs6000_builtins,
290 enum rs6000_builtins));
291 static void spe_init_builtins PARAMS ((void));
292 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
293 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
294 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
295 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
296
297 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
298 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
299 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
300 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
301 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
302 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
303 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
304 static void rs6000_parse_abi_options PARAMS ((void));
305 static void rs6000_parse_alignment_option PARAMS ((void));
306 static void rs6000_parse_tls_size_option PARAMS ((void));
307 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
308 static int first_altivec_reg_to_save PARAMS ((void));
309 static unsigned int compute_vrsave_mask PARAMS ((void));
310 static void is_altivec_return_reg PARAMS ((rtx, void *));
311 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
312 int easy_vector_constant PARAMS ((rtx, enum machine_mode));
313 static int easy_vector_same PARAMS ((rtx, enum machine_mode));
314 static bool is_ev64_opaque_type PARAMS ((tree));
315 static rtx rs6000_dwarf_register_span PARAMS ((rtx));
316 static rtx rs6000_legitimize_tls_address PARAMS ((rtx, enum tls_model));
317 static rtx rs6000_tls_get_addr PARAMS ((void));
318 static rtx rs6000_got_sym PARAMS ((void));
319 static inline int rs6000_tls_symbol_ref_1 PARAMS ((rtx *, void *));
320 static const char *rs6000_get_some_local_dynamic_name PARAMS ((void));
321 static int rs6000_get_some_local_dynamic_name_1 PARAMS ((rtx *, void *));
322 static rtx rs6000_complex_function_value (enum machine_mode);
323 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *, enum machine_mode, tree);
324
325 /* Hash table stuff for keeping track of TOC entries. */
326
/* One hash-table entry: a constant (plus the machine mode it was
   entered with) mapped to the number of the internal label emitted for
   its TOC slot, so identical constants share one TOC entry.  */
327 struct toc_hash_struct GTY(())
328 {
329 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
330 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
331 rtx key;
/* Mode KEY was interned with; part of the hash identity.  */
332 enum machine_mode key_mode;
/* Number of the label on this constant's TOC entry.  */
333 int labelno;
334 };
335
/* The table itself; GC-managed, keyed on toc_hash_struct entries.  */
336 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
337 \f
338 /* Default register names. */
339 char rs6000_reg_names[][8] =
340 {
/* GPRs 0-31: bare numbers, no "r" prefix.  */
341 "0", "1", "2", "3", "4", "5", "6", "7",
342 "8", "9", "10", "11", "12", "13", "14", "15",
343 "16", "17", "18", "19", "20", "21", "22", "23",
344 "24", "25", "26", "27", "28", "29", "30", "31",
/* FPRs 0-31: same bare-number spelling (cf. %f prefix in
   alt_reg_names).  */
345 "0", "1", "2", "3", "4", "5", "6", "7",
346 "8", "9", "10", "11", "12", "13", "14", "15",
347 "16", "17", "18", "19", "20", "21", "22", "23",
348 "24", "25", "26", "27", "28", "29", "30", "31",
/* MQ, link register, count register, and "ap" (argument pointer).  */
349 "mq", "lr", "ctr","ap",
/* Condition registers CR0-CR7 (cf. %cr prefix in alt_reg_names),
   then the XER.  */
350 "0", "1", "2", "3", "4", "5", "6", "7",
351 "xer",
352 /* AltiVec registers. */
353 "0", "1", "2", "3", "4", "5", "6", "7",
354 "8", "9", "10", "11", "12", "13", "14", "15",
355 "16", "17", "18", "19", "20", "21", "22", "23",
356 "24", "25", "26", "27", "28", "29", "30", "31",
357 "vrsave", "vscr",
358 /* SPE registers. */
359 "spe_acc", "spefscr"
360 };
361
362 #ifdef TARGET_REGNAMES
/* Alternate, sigil-prefixed register names (%r/%f/%cr/%v); copied over
   rs6000_reg_names by rs6000_override_options when TARGET_REGNAMES is
   set.  Order must match rs6000_reg_names exactly.  */
363 static const char alt_reg_names[][8] =
364 {
365 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
366 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
367 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
368 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
369 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
370 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
371 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
372 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
/* These special registers keep their unprefixed spellings.  */
373 "mq", "lr", "ctr", "ap",
374 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
375 "xer",
376 /* AltiVec registers. */
377 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
378 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
379 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
380 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
381 "vrsave", "vscr",
382 /* SPE registers. */
383 "spe_acc", "spefscr"
384 };
385 #endif
386 \f
/* Provide harmless defaults for subtargets that do not define these.  */
387 #ifndef MASK_STRICT_ALIGN
388 #define MASK_STRICT_ALIGN 0
389 #endif
390 #ifndef TARGET_PROFILE_KERNEL
391 #define TARGET_PROFILE_KERNEL 0
392 #endif
393
394 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
395 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
396
397 /* Return 1 for a symbol ref for a thread-local storage symbol. */
398 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
399 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
400 \f
401 /* Initialize the GCC target structure. */
402 #undef TARGET_ATTRIBUTE_TABLE
403 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
404 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
405 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
406
407 #undef TARGET_ASM_ALIGNED_DI_OP
408 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
409
410 /* Default unaligned ops are only provided for ELF. Find the ops needed
411 for non-ELF systems. */
412 #ifndef OBJECT_FORMAT_ELF
413 #if TARGET_XCOFF
414 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
415 64-bit targets. */
416 #undef TARGET_ASM_UNALIGNED_HI_OP
417 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
418 #undef TARGET_ASM_UNALIGNED_SI_OP
419 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
420 #undef TARGET_ASM_UNALIGNED_DI_OP
421 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
422 #else
423 /* For Darwin. */
424 #undef TARGET_ASM_UNALIGNED_HI_OP
425 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
426 #undef TARGET_ASM_UNALIGNED_SI_OP
427 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
428 #endif
429 #endif
430
431 /* This hook deals with fixups for relocatable code and DI-mode objects
432 in 64-bit code. */
433 #undef TARGET_ASM_INTEGER
434 #define TARGET_ASM_INTEGER rs6000_assemble_integer
435
436 #ifdef HAVE_GAS_HIDDEN
437 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
438 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
439 #endif
440
441 #undef TARGET_HAVE_TLS
442 #define TARGET_HAVE_TLS HAVE_AS_TLS
443
444 #undef TARGET_CANNOT_FORCE_CONST_MEM
445 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
446
447 #undef TARGET_ASM_FUNCTION_PROLOGUE
448 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
449 #undef TARGET_ASM_FUNCTION_EPILOGUE
450 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
451
452 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
453 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
454 #undef TARGET_SCHED_VARIABLE_ISSUE
455 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
456
457 #undef TARGET_SCHED_ISSUE_RATE
458 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
459 #undef TARGET_SCHED_ADJUST_COST
460 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
461 #undef TARGET_SCHED_ADJUST_PRIORITY
462 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
463
464 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
465 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
466
467 #undef TARGET_INIT_BUILTINS
468 #define TARGET_INIT_BUILTINS rs6000_init_builtins
469
470 #undef TARGET_EXPAND_BUILTIN
471 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
472
473 #if TARGET_MACHO
474 #undef TARGET_BINDS_LOCAL_P
475 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
476 #endif
477
478 #undef TARGET_ASM_OUTPUT_MI_THUNK
479 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
480
481 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
482 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
483
484 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
485 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
486
487 #undef TARGET_RTX_COSTS
488 #define TARGET_RTX_COSTS rs6000_rtx_costs
489 #undef TARGET_ADDRESS_COST
490 #define TARGET_ADDRESS_COST hook_int_rtx_0
491
492 #undef TARGET_VECTOR_OPAQUE_P
493 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
494
495 #undef TARGET_DWARF_REGISTER_SPAN
496 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
497
/* Instantiate the target hook vector, picking up all of the hook
   overrides defined above via TARGET_INITIALIZER.  */
498 struct gcc_target targetm = TARGET_INITIALIZER;
499 \f
500 /* Override command line options. Mostly we process the processor
501 type and sometimes adjust other TARGET_ options. */
502
/* DEFAULT_CPU is the configure-time cpu name (--with-cpu), possibly
   null or empty; it seeds the lowest-priority rs6000_select entry.  */
503 void
504 rs6000_override_options (default_cpu)
505 const char *default_cpu;
506 {
507 size_t i, j;
508 struct rs6000_cpu_select *ptr;
509
510 /* Simplify the entries below by making a mask for any POWER
511 variant and any PowerPC variant. */
512
513 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
514 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
515 | MASK_PPC_GFXOPT | MASK_POWERPC64)
516 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
517
/* Map each recognized cpu name to its scheduling enum and the
   target_flags bits to set and clear when that cpu is selected.  */
518 static struct ptt
519 {
520 const char *const name; /* Canonical processor name. */
521 const enum processor_type processor; /* Processor type enum value. */
522 const int target_enable; /* Target flags to enable. */
523 const int target_disable; /* Target flags to disable. */
524 } const processor_target_table[]
525 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
526 POWER_MASKS | POWERPC_MASKS},
527 {"power", PROCESSOR_POWER,
528 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
529 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
530 {"power2", PROCESSOR_POWER,
531 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
532 POWERPC_MASKS | MASK_NEW_MNEMONICS},
533 {"power3", PROCESSOR_PPC630,
534 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
535 POWER_MASKS},
536 {"power4", PROCESSOR_POWER4,
537 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
538 POWER_MASKS},
539 {"powerpc", PROCESSOR_POWERPC,
540 MASK_POWERPC | MASK_NEW_MNEMONICS,
541 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
542 {"powerpc64", PROCESSOR_POWERPC64,
543 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
544 POWER_MASKS | POWERPC_OPT_MASKS},
545 {"rios", PROCESSOR_RIOS1,
546 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
547 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
548 {"rios1", PROCESSOR_RIOS1,
549 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
550 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
551 {"rsc", PROCESSOR_PPC601,
552 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
553 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
554 {"rsc1", PROCESSOR_PPC601,
555 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
556 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
557 {"rios2", PROCESSOR_RIOS2,
558 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
559 POWERPC_MASKS | MASK_NEW_MNEMONICS},
560 {"rs64a", PROCESSOR_RS64A,
561 MASK_POWERPC | MASK_NEW_MNEMONICS,
562 POWER_MASKS | POWERPC_OPT_MASKS},
563 {"401", PROCESSOR_PPC403,
564 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
565 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
566 {"403", PROCESSOR_PPC403,
567 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
568 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
569 {"405", PROCESSOR_PPC405,
570 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
571 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
572 {"405fp", PROCESSOR_PPC405,
573 MASK_POWERPC | MASK_NEW_MNEMONICS,
574 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
575 {"440", PROCESSOR_PPC440,
576 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
577 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
578 {"440fp", PROCESSOR_PPC440,
579 MASK_POWERPC | MASK_NEW_MNEMONICS,
580 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
581 {"505", PROCESSOR_MPCCORE,
582 MASK_POWERPC | MASK_NEW_MNEMONICS,
583 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
584 {"601", PROCESSOR_PPC601,
585 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
586 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
587 {"602", PROCESSOR_PPC603,
588 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
589 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
590 {"603", PROCESSOR_PPC603,
591 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
592 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
593 {"603e", PROCESSOR_PPC603,
594 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
595 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
596 {"ec603e", PROCESSOR_PPC603,
597 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
598 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
599 {"604", PROCESSOR_PPC604,
600 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
601 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
602 {"604e", PROCESSOR_PPC604e,
603 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
604 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
605 {"620", PROCESSOR_PPC620,
606 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
607 POWER_MASKS},
608 {"630", PROCESSOR_PPC630,
609 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
610 POWER_MASKS},
611 {"740", PROCESSOR_PPC750,
612 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
613 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
614 {"750", PROCESSOR_PPC750,
615 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
616 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
617 {"7400", PROCESSOR_PPC7400,
618 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
619 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
620 {"7450", PROCESSOR_PPC7450,
621 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
622 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
623 {"8540", PROCESSOR_PPC8540,
624 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
625 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
626 {"801", PROCESSOR_MPCCORE,
627 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
628 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
629 {"821", PROCESSOR_MPCCORE,
630 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
631 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
632 {"823", PROCESSOR_MPCCORE,
633 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
634 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
635 {"860", PROCESSOR_MPCCORE,
636 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
637 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
638 {"970", PROCESSOR_POWER4,
639 MASK_POWERPC | POWERPC_OPT_MASKS | MASK_NEW_MNEMONICS,
640 POWER_MASKS}};
641
642 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
643
644 /* Save current -mmultiple/-mno-multiple status. */
645 int multiple = TARGET_MULTIPLE;
646 /* Save current -mstring/-mno-string status. */
647 int string = TARGET_STRING;
648
649 /* Identify the processor type. */
650 rs6000_select[0].string = default_cpu;
651 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
652
/* Apply --with-cpu, -mcpu= and -mtune= in that order so later
   switches override earlier ones; -mtune= has set_arch_p == 0 and so
   only changes the scheduling model, not target_flags.  */
653 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
654 {
655 ptr = &rs6000_select[i];
656 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
657 {
658 for (j = 0; j < ptt_size; j++)
659 if (! strcmp (ptr->string, processor_target_table[j].name))
660 {
661 if (ptr->set_tune_p)
662 rs6000_cpu = processor_target_table[j].processor;
663
664 if (ptr->set_arch_p)
665 {
666 target_flags |= processor_target_table[j].target_enable;
667 target_flags &= ~processor_target_table[j].target_disable;
668 }
669 break;
670 }
671
/* Fell off the end of the table: the cpu name was not recognized.  */
672 if (j == ptt_size)
673 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
674 }
675 }
676
/* isel generation is on by default for E500 cores (may be overridden
   by -misel= below).  */
677 if (TARGET_E500)
678 rs6000_isel = 1;
679
680 /* If we are optimizing big endian systems for space, use the load/store
681 multiple and string instructions. */
682 if (BYTES_BIG_ENDIAN && optimize_size)
683 target_flags |= MASK_MULTIPLE | MASK_STRING;
684
685 /* If -mmultiple or -mno-multiple was explicitly used, don't
686 override with the processor default */
687 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
688 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
689
690 /* If -mstring or -mno-string was explicitly used, don't override
691 with the processor default. */
692 if ((target_flags_explicit & MASK_STRING) != 0)
693 target_flags = (target_flags & ~MASK_STRING) | string;
694
695 /* Don't allow -mmultiple or -mstring on little endian systems
696 unless the cpu is a 750, because the hardware doesn't support the
697 instructions used in little endian mode, and causes an alignment
698 trap. The 750 does not cause an alignment trap (except when the
699 target is unaligned). */
700
701 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
702 {
703 if (TARGET_MULTIPLE)
704 {
705 target_flags &= ~MASK_MULTIPLE;
/* Only warn if the user asked for it explicitly; stay silent when we
   merely undo a processor default.  */
706 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
707 warning ("-mmultiple is not supported on little endian systems");
708 }
709
710 if (TARGET_STRING)
711 {
712 target_flags &= ~MASK_STRING;
713 if ((target_flags_explicit & MASK_STRING) != 0)
714 warning ("-mstring is not supported on little endian systems");
715 }
716 }
717
718 /* Set debug flags */
719 if (rs6000_debug_name)
720 {
721 if (! strcmp (rs6000_debug_name, "all"))
722 rs6000_debug_stack = rs6000_debug_arg = 1;
723 else if (! strcmp (rs6000_debug_name, "stack"))
724 rs6000_debug_stack = 1;
725 else if (! strcmp (rs6000_debug_name, "arg"))
726 rs6000_debug_arg = 1;
727 else
728 error ("unknown -mdebug-%s switch", rs6000_debug_name);
729 }
730
/* Parse -mtraceback=; prefix match so e.g. "partial" and "none" are
   accepted via their leading characters.  */
731 if (rs6000_traceback_name)
732 {
733 if (! strncmp (rs6000_traceback_name, "full", 4))
734 rs6000_traceback = traceback_full;
735 else if (! strncmp (rs6000_traceback_name, "part", 4))
736 rs6000_traceback = traceback_part;
737 else if (! strncmp (rs6000_traceback_name, "no", 2))
738 rs6000_traceback = traceback_none;
739 else
740 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
741 rs6000_traceback_name);
742 }
743
744 /* Set size of long double */
745 rs6000_long_double_type_size = 64;
746 if (rs6000_long_double_size_string)
747 {
748 char *tail;
749 int size = strtol (rs6000_long_double_size_string, &tail, 10);
/* Only 64 and 128, with no trailing junk, are valid sizes.  */
750 if (*tail != '\0' || (size != 64 && size != 128))
751 error ("Unknown switch -mlong-double-%s",
752 rs6000_long_double_size_string);
753 else
754 rs6000_long_double_type_size = size;
755 }
756
757 /* Handle -mabi= options. */
758 rs6000_parse_abi_options ();
759
760 /* Handle -malign-XXXXX option. */
761 rs6000_parse_alignment_option ();
762
763 /* Handle generic -mFOO=YES/NO options. */
764 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
765 &rs6000_altivec_vrsave);
766 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
767 &rs6000_isel);
768 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
769 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
770 &rs6000_float_gprs);
771
772 /* Handle -mtls-size option. */
773 rs6000_parse_tls_size_option ();
774
/* Give the subtarget and sub-subtarget a chance to adjust options.  */
775 #ifdef SUBTARGET_OVERRIDE_OPTIONS
776 SUBTARGET_OVERRIDE_OPTIONS;
777 #endif
778 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
779 SUBSUBTARGET_OVERRIDE_OPTIONS;
780 #endif
781
782 if (TARGET_E500)
783 {
784 /* The e500 does not have string instructions, and we set
785 MASK_STRING above when optimizing for size. */
786 if ((target_flags & MASK_STRING) != 0)
787 target_flags = target_flags & ~MASK_STRING;
788
789 /* No SPE means 64-bit long doubles, even if an E500. */
790 if (rs6000_spe_string != 0
791 && !strcmp (rs6000_spe_string, "no"))
792 rs6000_long_double_type_size = 64;
793 }
794 else if (rs6000_select[1].string != NULL)
795 {
796 /* For the powerpc-eabispe configuration, we set all these by
797 default, so let's unset them if we manually set another
798 CPU that is not the E500. */
799 if (rs6000_abi_string == 0)
800 rs6000_spe_abi = 0;
801 if (rs6000_spe_string == 0)
802 rs6000_spe = 0;
803 if (rs6000_float_gprs_string == 0)
804 rs6000_float_gprs = 0;
805 if (rs6000_isel_string == 0)
806 rs6000_isel = 0;
807 if (rs6000_long_double_size_string == 0)
808 rs6000_long_double_type_size = 64;
809 }
810
811 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
812 using TARGET_OPTIONS to handle a toggle switch, but we're out of
813 bits in target_flags so TARGET_SWITCHES cannot be used.
814 Assumption here is that rs6000_longcall_switch points into the
815 text of the complete option, rather than being a copy, so we can
816 scan back for the presence or absence of the no- modifier. */
817 if (rs6000_longcall_switch)
818 {
819 const char *base = rs6000_longcall_switch;
/* Back up to just past the "-m" so BASE names the whole switch text,
   including any "no-" prefix ("longcall..." or "no-longcall...").  */
820 while (base[-1] != 'm') base--;
821
/* The switch takes no argument; any trailing text is malformed.  */
822 if (*rs6000_longcall_switch != '\0')
823 error ("invalid option `%s'", base);
824 rs6000_default_long_calls = (base[0] != 'n');
825 }
826
827 #ifdef TARGET_REGNAMES
828 /* If the user desires alternate register names, copy in the
829 alternate names now. */
830 if (TARGET_REGNAMES)
831 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
832 #endif
833
834 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
835 If -maix-struct-return or -msvr4-struct-return was explicitly
836 used, don't override with the ABI default. */
837 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
838 {
839 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
840 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
841 else
842 target_flags |= MASK_AIX_STRUCT_RET;
843 }
844
/* 128-bit long double on AIX/Darwin uses the IBM double-double
   format.  */
845 if (TARGET_LONG_DOUBLE_128
846 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
847 real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;
848
849 /* Allocate an alias set for register saves & restores from stack. */
850 rs6000_sr_alias_set = new_alias_set ();
851
852 if (TARGET_TOC)
853 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
854
855 /* We can only guarantee the availability of DI pseudo-ops when
856 assembling for 64-bit targets. */
857 if (!TARGET_64BIT)
858 {
859 targetm.asm_out.aligned_op.di = NULL;
860 targetm.asm_out.unaligned_op.di = NULL;
861 }
862
863 /* Set maximum branch target alignment at two instructions, eight bytes. */
864 align_jumps_max_skip = 8;
865 align_loops_max_skip = 8;
866
867 /* Arrange to save and restore machine status around nested functions. */
868 init_machine_status = rs6000_init_machine_status;
869 }
870
/* Handle generic options of the form -mfoo=yes/no.
   NAME is the option name, used only for the diagnostic.
   VALUE is the user-supplied option value, or 0 when the option was
   not given at all, in which case *FLAG is left untouched.
   FLAG points to the int that receives 1 for "yes" or 0 for "no";
   any other value is diagnosed and *FLAG is left unchanged.  */
static void
rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
{
  if (value == 0)
    return;

  if (strcmp (value, "yes") == 0)
    *flag = 1;
  else if (strcmp (value, "no") == 0)
    *flag = 0;
  else
    error ("unknown -m%s= option specified: '%s'", name, value);
}
888
889 /* Handle -mabi= options. */
890 static void
891 rs6000_parse_abi_options ()
892 {
893 if (rs6000_abi_string == 0)
894 return;
895 else if (! strcmp (rs6000_abi_string, "altivec"))
896 rs6000_altivec_abi = 1;
897 else if (! strcmp (rs6000_abi_string, "no-altivec"))
898 rs6000_altivec_abi = 0;
899 else if (! strcmp (rs6000_abi_string, "spe"))
900 {
901 rs6000_spe_abi = 1;
902 if (!TARGET_SPE_ABI)
903 error ("not configured for ABI: '%s'", rs6000_abi_string);
904 }
905
906 else if (! strcmp (rs6000_abi_string, "no-spe"))
907 rs6000_spe_abi = 0;
908 else
909 error ("unknown ABI specified: '%s'", rs6000_abi_string);
910 }
911
912 /* Handle -malign-XXXXXX options. */
913 static void
914 rs6000_parse_alignment_option ()
915 {
916 if (rs6000_alignment_string == 0
917 || ! strcmp (rs6000_alignment_string, "power"))
918 rs6000_alignment_flags = MASK_ALIGN_POWER;
919 else if (! strcmp (rs6000_alignment_string, "natural"))
920 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
921 else
922 error ("unknown -malign-XXXXX option specified: '%s'",
923 rs6000_alignment_string);
924 }
925
926 /* Validate and record the size specified with the -mtls-size option. */
927
928 static void
929 rs6000_parse_tls_size_option ()
930 {
931 if (rs6000_tls_size_string == 0)
932 return;
933 else if (strcmp (rs6000_tls_size_string, "16") == 0)
934 rs6000_tls_size = 16;
935 else if (strcmp (rs6000_tls_size_string, "32") == 0)
936 rs6000_tls_size = 32;
937 else if (strcmp (rs6000_tls_size_string, "64") == 0)
938 rs6000_tls_size = 64;
939 else
940 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
941 }
942
/* OPTIMIZATION_OPTIONS hook: adjust target options based on the
   optimization LEVEL and -Os flag SIZE.  The rs6000 port currently
   has nothing to adjust, so this is intentionally empty.  */
void
optimization_options (level, size)
     int level ATTRIBUTE_UNUSED;
     int size ATTRIBUTE_UNUSED;
{
}
949 \f
/* Do anything needed at the start of the asm file.  Emits the
   default preamble and, under -fverbose-asm, an assembler comment
   recording the cpu/tune selections and sdata / -G settings.  */

static void
rs6000_file_start ()
{
  size_t i;
  char buffer[80];
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;
  const char *default_cpu = TARGET_CPU_DEFAULT;
  FILE *file = asm_out_file;

  default_file_start ();

#ifdef TARGET_BI_ARCH
  /* On a bi-arch target the configured default cpu name does not
     apply if the command line flipped the word size.  */
  if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
    default_cpu = 0;
#endif

  if (flag_verbose_asm)
    {
      /* START initially points at the comment header in BUFFER; it is
	 reset to "" after the first item so the header prints once.  */
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
		   g_switch_value);
	  start = "";
	}
#endif

      /* If anything at all was printed, START is now ""; terminate
	 the comment line.  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
1005 \f
1006 /* Return nonzero if this function is known to have a null epilogue. */
1007
1008 int
1009 direct_return ()
1010 {
1011 if (reload_completed)
1012 {
1013 rs6000_stack_t *info = rs6000_stack_info ();
1014
1015 if (info->first_gp_reg_save == 32
1016 && info->first_fp_reg_save == 64
1017 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1018 && ! info->lr_save_p
1019 && ! info->cr_save_p
1020 && info->vrsave_mask == 0
1021 && ! info->push_p)
1022 return 1;
1023 }
1024
1025 return 0;
1026 }
1027
/* Predicate that accepts absolutely any rtx: returns 1 always.
   OP and MODE are ignored.  */

int
any_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return 1;
}
1037
1038 /* Returns 1 if op is the count register. */
1039 int
1040 count_register_operand (op, mode)
1041 rtx op;
1042 enum machine_mode mode ATTRIBUTE_UNUSED;
1043 {
1044 if (GET_CODE (op) != REG)
1045 return 0;
1046
1047 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1048 return 1;
1049
1050 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1051 return 1;
1052
1053 return 0;
1054 }
1055
1056 /* Returns 1 if op is an altivec register. */
1057 int
1058 altivec_register_operand (op, mode)
1059 rtx op;
1060 enum machine_mode mode ATTRIBUTE_UNUSED;
1061 {
1062
1063 return (register_operand (op, mode)
1064 && (GET_CODE (op) != REG
1065 || REGNO (op) > FIRST_PSEUDO_REGISTER
1066 || ALTIVEC_REGNO_P (REGNO (op))));
1067 }
1068
1069 int
1070 xer_operand (op, mode)
1071 rtx op;
1072 enum machine_mode mode ATTRIBUTE_UNUSED;
1073 {
1074 if (GET_CODE (op) != REG)
1075 return 0;
1076
1077 if (XER_REGNO_P (REGNO (op)))
1078 return 1;
1079
1080 return 0;
1081 }
1082
1083 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1084 by such constants completes more quickly. */
1085
1086 int
1087 s8bit_cint_operand (op, mode)
1088 rtx op;
1089 enum machine_mode mode ATTRIBUTE_UNUSED;
1090 {
1091 return ( GET_CODE (op) == CONST_INT
1092 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1093 }
1094
1095 /* Return 1 if OP is a constant that can fit in a D field. */
1096
1097 int
1098 short_cint_operand (op, mode)
1099 rtx op;
1100 enum machine_mode mode ATTRIBUTE_UNUSED;
1101 {
1102 return (GET_CODE (op) == CONST_INT
1103 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1104 }
1105
1106 /* Similar for an unsigned D field. */
1107
1108 int
1109 u_short_cint_operand (op, mode)
1110 rtx op;
1111 enum machine_mode mode ATTRIBUTE_UNUSED;
1112 {
1113 return (GET_CODE (op) == CONST_INT
1114 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1115 }
1116
1117 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1118
1119 int
1120 non_short_cint_operand (op, mode)
1121 rtx op;
1122 enum machine_mode mode ATTRIBUTE_UNUSED;
1123 {
1124 return (GET_CODE (op) == CONST_INT
1125 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1126 }
1127
1128 /* Returns 1 if OP is a CONST_INT that is a positive value
1129 and an exact power of 2. */
1130
1131 int
1132 exact_log2_cint_operand (op, mode)
1133 rtx op;
1134 enum machine_mode mode ATTRIBUTE_UNUSED;
1135 {
1136 return (GET_CODE (op) == CONST_INT
1137 && INTVAL (op) > 0
1138 && exact_log2 (INTVAL (op)) >= 0);
1139 }
1140
/* Returns 1 if OP is a register that is not special (i.e., not MQ,
   ctr, or lr).  */

int
gpc_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Accept any pseudo and any hard register outside the special
     group.  NOTE(review): this relies on the rs6000 register
     numbering placing GPRs below MQ_REGNO and the special registers
     between MQ_REGNO and ARG_POINTER_REGNUM -- confirm against the
     register layout in rs6000.h before changing any regno.  */
  return (register_operand (op, mode)
	  && (GET_CODE (op) != REG
	      || (REGNO (op) >= ARG_POINTER_REGNUM
		  && !XER_REGNO_P (REGNO (op)))
	      || REGNO (op) < MQ_REGNO));
}
1155
1156 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1157 CR field. */
1158
1159 int
1160 cc_reg_operand (op, mode)
1161 rtx op;
1162 enum machine_mode mode;
1163 {
1164 return (register_operand (op, mode)
1165 && (GET_CODE (op) != REG
1166 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1167 || CR_REGNO_P (REGNO (op))));
1168 }
1169
1170 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1171 CR field that isn't CR0. */
1172
1173 int
1174 cc_reg_not_cr0_operand (op, mode)
1175 rtx op;
1176 enum machine_mode mode;
1177 {
1178 return (register_operand (op, mode)
1179 && (GET_CODE (op) != REG
1180 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1181 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1182 }
1183
1184 /* Returns 1 if OP is either a constant integer valid for a D-field or
1185 a non-special register. If a register, it must be in the proper
1186 mode unless MODE is VOIDmode. */
1187
1188 int
1189 reg_or_short_operand (op, mode)
1190 rtx op;
1191 enum machine_mode mode;
1192 {
1193 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1194 }
1195
1196 /* Similar, except check if the negation of the constant would be
1197 valid for a D-field. */
1198
1199 int
1200 reg_or_neg_short_operand (op, mode)
1201 rtx op;
1202 enum machine_mode mode;
1203 {
1204 if (GET_CODE (op) == CONST_INT)
1205 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1206
1207 return gpc_reg_operand (op, mode);
1208 }
1209
1210 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1211 a non-special register. If a register, it must be in the proper
1212 mode unless MODE is VOIDmode. */
1213
1214 int
1215 reg_or_aligned_short_operand (op, mode)
1216 rtx op;
1217 enum machine_mode mode;
1218 {
1219 if (gpc_reg_operand (op, mode))
1220 return 1;
1221 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1222 return 1;
1223
1224 return 0;
1225 }
1226
1227
1228 /* Return 1 if the operand is either a register or an integer whose
1229 high-order 16 bits are zero. */
1230
1231 int
1232 reg_or_u_short_operand (op, mode)
1233 rtx op;
1234 enum machine_mode mode;
1235 {
1236 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1237 }
1238
1239 /* Return 1 is the operand is either a non-special register or ANY
1240 constant integer. */
1241
1242 int
1243 reg_or_cint_operand (op, mode)
1244 rtx op;
1245 enum machine_mode mode;
1246 {
1247 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1248 }
1249
/* Return 1 if the operand is either a non-special register or ANY
   32-bit signed constant integer.  */

int
reg_or_arith_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* On a 32-bit host every CONST_INT already fits in 32 bits, so the
     range check is compiled in only for wider hosts: bias the signed
     32-bit window to [0, 2^32) and compare.  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT != 32
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
#endif
	      ));
}
1266
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit addition.  */

int
reg_or_add_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* The asymmetric bound (0x7fff8000 rather than 0x80000000) leaves
     room for the addis/addi split: the low 16 bits are sign-extended,
     so values at the very top of the positive range would overflow
     the high part.  On a 32-bit host only the upper bound can be
     violated; wider hosts use the biased-window comparison.  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      && INTVAL (op) < 0x7fff8000
#else
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1285
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit subtraction.  */

int
reg_or_sub_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Same window as reg_or_add_cint64_operand, applied to -INTVAL
     since the subtraction is implemented as addition of the negated
     constant.  NOTE(review): negating the most negative HOST_WIDE_INT
     overflows; presumably such values never reach here -- confirm.  */
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      && (- INTVAL (op)) < 0x7fff8000
#else
	      && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1304
/* Return 1 if the operand is either a non-special register or ANY
   32-bit unsigned constant integer.  */

int
reg_or_logical_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  /* A mode wider than the host word must itself be wider
	     than 32 bits; anything else indicates a bug upstream.  */
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  /* Negative values sign-extend above bit 31, so they cannot
	     be 32-bit unsigned constants.  */
	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* Accept iff no bits above the low 32 survive masking to
	 MODE's width.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integer CONST_DOUBLEs only arise when the value is wider
	 than a host word, i.e. DImode on a 32-bit host.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
1338
1339 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1340
1341 int
1342 got_operand (op, mode)
1343 rtx op;
1344 enum machine_mode mode ATTRIBUTE_UNUSED;
1345 {
1346 return (GET_CODE (op) == SYMBOL_REF
1347 || GET_CODE (op) == CONST
1348 || GET_CODE (op) == LABEL_REF);
1349 }
1350
1351 /* Return 1 if the operand is a simple references that can be loaded via
1352 the GOT (labels involving addition aren't allowed). */
1353
1354 int
1355 got_no_const_operand (op, mode)
1356 rtx op;
1357 enum machine_mode mode ATTRIBUTE_UNUSED;
1358 {
1359 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1360 }
1361
/* Return the number of instructions it takes to form the constant
   VALUE in an integer register.  */

static int
num_insns_constant_wide (value)
     HOST_WIDE_INT value;
{
  /* signed constant loadable with {cal|addi} */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* constant loadable with {cau|addis} */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      /* Split into the sign-extended low 32 bits and the remaining
	 high part; shifting by 31 keeps the low half's sign bit
	 folded into HIGH.  */
      HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
      HOST_WIDE_INT high = value >> 31;

      /* Fits in 32 bits signed: two instructions suffice.  */
      if (high == 0 || high == -1)
	return 2;

      high >>= 1;

      if (low == 0)
	/* Form HIGH, then one shift to place it.  */
	return num_insns_constant_wide (high) + 1;
      else
	/* Form HIGH, shift, then merge in LOW.  */
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  else
    return 2;
}
1399
/* Return the number of instructions needed to load constant OP of
   mode MODE into an integer register.  OP must be a CONST_INT or a
   CONST_DOUBLE; anything else aborts.  */
int
num_insns_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A 64-bit value that is a valid mask can always be formed in
	 two instructions, which may beat the generic split.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      /* Single precision: cost of loading the 32-bit target image.  */
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT) l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      /* Integer CONST_DOUBLEs (VOIDmode/DImode) carry their halves
	 directly; floating values are converted to the target image
	 first, with ENDIAN selecting which word is the high half.  */
      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low = l[1 - endian];
	}

      if (TARGET_32BIT)
	/* Two 32-bit registers: cost each half independently.  */
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* One 64-bit register: mirror the high/low split logic of
	     num_insns_constant_wide.  */
	  if (high == 0 && low >= 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && low < 0)
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  else if (low == 0)
	    return num_insns_constant_wide (high) + 1;

	  else
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
1474
/* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
   register with one instruction per word.  We only do this if we can
   safely read CONST_DOUBLE_{LOW,HIGH}.  */

int
easy_fp_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != CONST_DOUBLE
      || GET_MODE (op) != mode
      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
    return 0;

  /* Consider all constants with -msoft-float to be easy.  */
  if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
      && mode != DImode)
    return 1;

  /* If we are using V.4 style PIC, consider all constants to be hard.  */
  if (flag_pic && DEFAULT_ABI == ABI_V4)
    return 0;

#ifdef TARGET_RELOCATABLE
  /* Similarly if we are using -mrelocatable, consider all constants
     to be hard.  */
  if (TARGET_RELOCATABLE)
    return 0;
#endif

  if (mode == TFmode)
    {
      /* Long double: easy iff each of the four 32-bit target words
	 can be loaded in a single instruction.  */
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
    }

  else if (mode == DFmode)
    {
      /* Double: both 32-bit target words must load in one insn.  */
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
    }

  else if (mode == SFmode)
    {
      /* Single: one 32-bit target word to load.  */
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      return num_insns_constant_wide (l) == 1;
    }

  else if (mode == DImode)
    /* DImode integer CONST_DOUBLE: easy when the low word is zero
       on a 64-bit target, or when it takes at most two insns.  */
    return ((TARGET_POWERPC64
	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
	    || (num_insns_constant (op, DImode) <= 2));

  else if (mode == SImode)
    return 1;
  else
    abort ();
}
1552
1553 /* Return nonzero if all elements of a vector have the same value. */
1554
1555 static int
1556 easy_vector_same (op, mode)
1557 rtx op;
1558 enum machine_mode mode ATTRIBUTE_UNUSED;
1559 {
1560 int units, i, cst;
1561
1562 units = CONST_VECTOR_NUNITS (op);
1563
1564 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1565 for (i = 1; i < units; ++i)
1566 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1567 break;
1568 if (i == units)
1569 return 1;
1570 return 0;
1571 }
1572
/* Return 1 if the operand is a CONST_VECTOR that can be put into a
   register without using memory.  (Despite the original wording this
   predicate tests CONST_VECTOR, not CONST_INT -- see the GET_CODE
   check below.)  */

int
easy_vector_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  int cst, cst2;

  if (GET_CODE (op) != CONST_VECTOR
      || (!TARGET_ALTIVEC
	  && !TARGET_SPE))
    return 0;

  /* The all-zero vector of a supported mode is always easy
     (vxor on AltiVec, splitters on SPE).  */
  if (zero_constant (op, mode)
      && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
	  || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
    return 1;

  if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
    return 0;

  if (TARGET_SPE && mode == V1DImode)
    return 0;

  cst = INTVAL (CONST_VECTOR_ELT (op, 0));
  cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));

  /* Limit SPE vectors to 15 bits signed.  These we can generate with:
	li r0, CONSTANT1
	evmergelo r0, r0, r0
	li r0, CONSTANT2

     I don't know how efficient it would be to allow bigger constants,
     considering we'll have an extra 'ori' for every 'li'.  I doubt 5
     instructions is better than a 64-bit memory load, but I don't
     have the e500 timing specs.  */
  if (TARGET_SPE && mode == V2SImode
      && cst >= -0x7fff && cst <= 0x7fff
      && cst2 >= -0x7fff && cst2 <= 0x7fff)
    return 1;

  /* AltiVec: a 5-bit signed splat constant, possibly doubled via a
     self-add (see EASY_VECTOR_15_ADD_SELF).  */
  if (TARGET_ALTIVEC && EASY_VECTOR_15 (cst, op, mode))
    return 1;

  if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
    return 1;

  return 0;
}
1624
1625 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1626
1627 int
1628 easy_vector_constant_add_self (op, mode)
1629 rtx op;
1630 enum machine_mode mode;
1631 {
1632 int cst;
1633
1634 if (!easy_vector_constant (op, mode))
1635 return 0;
1636
1637 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1638
1639 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
1640 }
1641
/* Return the assembler template that loads the easy vector constant
   OPERANDS[1] into vector register OPERANDS[0].  Only valid for
   constants accepted by easy_vector_constant; anything else aborts.
   May rewrite OPERANDS[1] and OPERANDS[2] with scalar elements for
   use by the returned template.  */
const char *
output_vec_const_move (operands)
     rtx *operands;
{
  int cst, cst2;
  enum machine_mode mode;
  rtx dest, vec;

  dest = operands[0];
  vec = operands[1];

  cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
  cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
  mode = GET_MODE (dest);

  if (TARGET_ALTIVEC)
    {
      if (zero_constant (vec, mode))
	return "vxor %0,%0,%0";
      else if (EASY_VECTOR_15 (cst, vec, mode))
	{
	  /* Splat of a 5-bit signed immediate: pick the splat insn
	     that matches the element width.  */
	  operands[1] = GEN_INT (cst);
	  switch (mode)
	    {
	    case V4SImode:
	      return "vspltisw %0,%1";
	    case V8HImode:
	      return "vspltish %0,%1";
	    case V16QImode:
	      return "vspltisb %0,%1";
	    default:
	      abort ();
	    }
	}
      else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
	/* Emitted as "#" so a post-reload splitter expands it.  */
	return "#";
      else
	abort ();
    }

  if (TARGET_SPE)
    {
      /* Vector constant 0 is handled as a splitter of V2SI, and in the
	 pattern of V1DI, V4HI, and V2SF.

	 FIXME: We should probably return # and add post reload
	 splitters for these, but this way is so easy ;-).
      */
      operands[1] = GEN_INT (cst);
      operands[2] = GEN_INT (cst2);
      if (cst == cst2)
	return "li %0,%1\n\tevmergelo %0,%0,%0";
      else
	return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
    }

  abort ();
}
1700
/* Return 1 if the operand is the constant 0 of mode MODE.  This works
   for scalars as well as vectors, since CONST0_RTX covers both.  */
int
zero_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return op == CONST0_RTX (mode);
}
1710
1711 /* Return 1 if the operand is 0.0. */
1712 int
1713 zero_fp_constant (op, mode)
1714 rtx op;
1715 enum machine_mode mode;
1716 {
1717 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1718 }
1719
1720 /* Return 1 if the operand is in volatile memory. Note that during
1721 the RTL generation phase, memory_operand does not return TRUE for
1722 volatile memory references. So this function allows us to
1723 recognize volatile references where its safe. */
1724
1725 int
1726 volatile_mem_operand (op, mode)
1727 rtx op;
1728 enum machine_mode mode;
1729 {
1730 if (GET_CODE (op) != MEM)
1731 return 0;
1732
1733 if (!MEM_VOLATILE_P (op))
1734 return 0;
1735
1736 if (mode != GET_MODE (op))
1737 return 0;
1738
1739 if (reload_completed)
1740 return memory_operand (op, mode);
1741
1742 if (reload_in_progress)
1743 return strict_memory_address_p (mode, XEXP (op, 0));
1744
1745 return memory_address_p (mode, XEXP (op, 0));
1746 }
1747
1748 /* Return 1 if the operand is an offsettable memory operand. */
1749
1750 int
1751 offsettable_mem_operand (op, mode)
1752 rtx op;
1753 enum machine_mode mode;
1754 {
1755 return ((GET_CODE (op) == MEM)
1756 && offsettable_address_p (reload_completed || reload_in_progress,
1757 mode, XEXP (op, 0)));
1758 }
1759
1760 /* Return 1 if the operand is either an easy FP constant (see above) or
1761 memory. */
1762
1763 int
1764 mem_or_easy_const_operand (op, mode)
1765 rtx op;
1766 enum machine_mode mode;
1767 {
1768 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1769 }
1770
1771 /* Return 1 if the operand is either a non-special register or an item
1772 that can be used as the operand of a `mode' add insn. */
1773
1774 int
1775 add_operand (op, mode)
1776 rtx op;
1777 enum machine_mode mode;
1778 {
1779 if (GET_CODE (op) == CONST_INT)
1780 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1781 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1782
1783 return gpc_reg_operand (op, mode);
1784 }
1785
1786 /* Return 1 if OP is a constant but not a valid add_operand. */
1787
1788 int
1789 non_add_cint_operand (op, mode)
1790 rtx op;
1791 enum machine_mode mode ATTRIBUTE_UNUSED;
1792 {
1793 return (GET_CODE (op) == CONST_INT
1794 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1795 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1796 }
1797
/* Return 1 if the operand is a non-special register or a constant that
   can be used as the operand of an OR or XOR insn on the RS/6000.  */

int
logical_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* On a 32-bit host a negative value would sign-extend into the
	 upper half of a wider mode, so it can't be valid here.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integer CONST_DOUBLEs only appear for values wider than the
	 host word; OPH holds the bits that won't fit.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  /* Valid iff the constant fits entirely in the low 16 bits
     (ori/xori) or entirely in the next 16 bits (oris/xoris).  */
  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
1836
1837 /* Return 1 if C is a constant that is not a logical operand (as
1838 above), but could be split into one. */
1839
1840 int
1841 non_logical_cint_operand (op, mode)
1842 rtx op;
1843 enum machine_mode mode;
1844 {
1845 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1846 && ! logical_operand (op, mode)
1847 && reg_or_logical_cint_operand (op, mode));
1848 }
1849
/* Return 1 if OP is a constant that can be encoded in a 32-bit mask on
   the RS/6000.  It is if there are no more than two 1->0 or 0->1
   transitions.  Reject all ones and all zeros, since these should have
   been optimized away and confuse the making of MB and ME.  */

int
mask_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Fail in 64-bit mode if the mask wraps around because the upper
     32-bits of the mask will all be 1s, contrary to GCC's internal view.  */
  if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
    return 0;

  /* We don't change the number of transitions by inverting,
     so make sure we start with the LS bit zero.  */
  if (c & 1)
    c = ~c;

  /* Reject all zeros or all ones.  */
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;

  /* Invert to look for a second transition.  */
  c = ~c;

  /* Erase first transition.  */
  c &= -lsb;

  /* Find the second transition (if any).  */
  lsb = c & -c;

  /* Match if all the bits above are 1's (or c is zero).  */
  return c == -lsb;
}
1896
/* Return 1 for the PowerPC64 rlwinm corner case: a 32-bit mask whose
   run of ones wraps around from bit 31 to bit 0 -- exactly the shape
   that mask_operand rejects under TARGET_POWERPC64.  */

int
mask_operand_wrap (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Only wrapping masks (both bit 0 and bit 31 set) qualify.  */
  if ((c & 0x80000001) != 0x80000001)
    return 0;

  /* The low bit is set, so invert to start with it clear; reject the
     all-ones value (now zero).  */
  c = ~c;
  if (c == 0)
    return 0;

  /* Find and erase the first transition, then check that everything
     above the second transition (if any) is ones -- i.e. at most two
     transitions, the same bit trick used by mask_operand above.  */
  lsb = c & -c;
  c = ~c;
  c &= -lsb;
  lsb = c & -c;
  return c == -lsb;
}
1924
/* Return 1 if the operand is a constant that is a PowerPC64 mask.
   It is if there are no more than one 1->0 or 0->1 transitions.
   Reject all zeros, since zero should have been optimized away and
   confuses the making of MB and ME.  */

int
mask64_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Reject all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the transition, and check that all bits above are 1's.  */
      lsb = c & -c;

      /* Match if all the bits above are 1's (or c is zero, which
	 happens for the all-ones input: that IS a valid mask).  */
      return c == -lsb;
    }
  return 0;
}
1958
/* Like mask64_operand, but allow up to three transitions (i.e. two
   separate runs of ones).  This predicate is used by insn patterns
   that generate two rldicl or rldicr machine insns.  */

int
mask64_2_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Disallow all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the first transition.  */
      lsb = c & -c;

      /* Invert to look for a second transition.  */
      c = ~c;

      /* Erase first transition.  */
      c &= -lsb;

      /* Find the second transition.  */
      lsb = c & -c;

      /* Invert to look for a third transition.  */
      c = ~c;

      /* Erase second transition.  */
      c &= -lsb;

      /* Find the third transition (if any).  */
      lsb = c & -c;

      /* Match if all the bits above are 1's (or c is zero).  */
      return c == -lsb;
    }
  return 0;
}
2009
/* Generates shifts and masks for a pair of rldicl or rldicr insns to
   implement ANDing by the mask IN.  IN must be a CONST_INT accepted
   by mask64_2_operand.  On return OUT[0]/OUT[2] hold the two rotate
   counts and OUT[1]/OUT[3] the two masks.  NOTE(review): the exact
   rotate/mask composition should be confirmed against the rs6000.md
   patterns that consume these operands.  Only usable on hosts whose
   HOST_WIDE_INT is at least 64 bits; aborts otherwise.  */
void
build_mask64_2_operands (in, out)
     rtx in;
     rtx *out;
{
#if HOST_BITS_PER_WIDE_INT >= 64
  unsigned HOST_WIDE_INT c, lsb, m1, m2;
  int shift;

  if (GET_CODE (in) != CONST_INT)
    abort ();

  c = INTVAL (in);
  if (c & 1)
    {
      /* Assume c initially something like 0x00fff000000fffff.  The idea
	 is to rotate the word so that the middle ^^^^^^ group of zeros
	 is at the MS end and can be cleared with an rldicl mask.  We then
	 rotate back and clear off the MS ^^ group of zeros with a
	 second rldicl.  */
      c = ~c;			/* c == 0xff000ffffff00000 */
      lsb = c & -c;		/* lsb == 0x0000000000100000 */
      m1 = -lsb;		/* m1 == 0xfffffffffff00000 */
      c = ~c;			/* c == 0x00fff000000fffff */
      c &= -lsb;		/* c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/* c == 0xff000fffffffffff */
      c &= -lsb;		/* c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 <<= 64 - shift;	/* m1 == 0xffffff0000000000 */
      m1 = ~m1;			/* m1 == 0x000000ffffffffff */
      m2 = ~c;			/* m2 == 0x00ffffffffffffff */
    }
  else
    {
      /* Assume c initially something like 0xff000f0000000000.  The idea
	 is to rotate the word so that the ^^^  middle group of zeros
	 is at the LS end and can be cleared with an rldicr mask.  We then
	 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
	 a second rldicr.  */
      lsb = c & -c;		/* lsb == 0x0000010000000000 */
      m2 = -lsb;		/* m2 == 0xffffff0000000000 */
      c = ~c;			/* c == 0x00fff0ffffffffff */
      c &= -lsb;		/* c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/* c == 0xff000fffffffffff */
      c &= -lsb;		/* c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 = ~c;			/* m1 == 0x00ffffffffffffff */
      m1 >>= shift;		/* m1 == 0x0000000000000fff */
      m1 = ~m1;			/* m1 == 0xfffffffffffff000 */
    }

  /* Note that when we only have two 0->1 and 1->0 transitions, one of the
     masks will be all 1's.  We are guaranteed more than one transition.  */
  out[0] = GEN_INT (64 - shift);
  out[1] = GEN_INT (m1);
  out[2] = GEN_INT (shift);
  out[3] = GEN_INT (m2);
#else
  (void)in;
  (void)out;
  abort ();
#endif
}
2081
2082 /* Return 1 if the operand is either a non-special register or a constant
2083 that can be used as the operand of a PowerPC64 logical AND insn. */
2084
2085 int
2086 and64_operand (op, mode)
2087 rtx op;
2088 enum machine_mode mode;
2089 {
2090 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2091 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2092
2093 return (logical_operand (op, mode) || mask64_operand (op, mode));
2094 }
2095
2096 /* Like the above, but also match constants that can be implemented
2097 with two rldicl or rldicr insns. */
2098
2099 int
2100 and64_2_operand (op, mode)
2101 rtx op;
2102 enum machine_mode mode;
2103 {
2104 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2105 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2106
2107 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2108 }
2109
2110 /* Return 1 if the operand is either a non-special register or a
2111 constant that can be used as the operand of an RS/6000 logical AND insn. */
2112
2113 int
2114 and_operand (op, mode)
2115 rtx op;
2116 enum machine_mode mode;
2117 {
2118 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2119 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2120
2121 return (logical_operand (op, mode) || mask_operand (op, mode));
2122 }
2123
2124 /* Return 1 if the operand is a general register or memory operand. */
2125
2126 int
2127 reg_or_mem_operand (op, mode)
2128 rtx op;
2129 enum machine_mode mode;
2130 {
2131 return (gpc_reg_operand (op, mode)
2132 || memory_operand (op, mode)
2133 || volatile_mem_operand (op, mode));
2134 }
2135
2136 /* Return 1 if the operand is a general register or memory operand without
2137 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2138 instruction. */
2139
2140 int
2141 lwa_operand (op, mode)
2142 rtx op;
2143 enum machine_mode mode;
2144 {
2145 rtx inner = op;
2146
2147 if (reload_completed && GET_CODE (inner) == SUBREG)
2148 inner = SUBREG_REG (inner);
2149
2150 return gpc_reg_operand (inner, mode)
2151 || (memory_operand (inner, mode)
2152 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2153 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2154 && (GET_CODE (XEXP (inner, 0)) != PLUS
2155 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2156 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2157 }
2158
2159 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2160
2161 int
2162 symbol_ref_operand (op, mode)
2163 rtx op;
2164 enum machine_mode mode;
2165 {
2166 if (mode != VOIDmode && GET_MODE (op) != mode)
2167 return 0;
2168
2169 return (GET_CODE (op) == SYMBOL_REF
2170 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2171 }
2172
2173 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2174 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2175
2176 int
2177 call_operand (op, mode)
2178 rtx op;
2179 enum machine_mode mode;
2180 {
2181 if (mode != VOIDmode && GET_MODE (op) != mode)
2182 return 0;
2183
2184 return (GET_CODE (op) == SYMBOL_REF
2185 || (GET_CODE (op) == REG
2186 && (REGNO (op) == LINK_REGISTER_REGNUM
2187 || REGNO (op) == COUNT_REGISTER_REGNUM
2188 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2189 }
2190
2191 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2192 this file. */
2193
2194 int
2195 current_file_function_operand (op, mode)
2196 rtx op;
2197 enum machine_mode mode ATTRIBUTE_UNUSED;
2198 {
2199 return (GET_CODE (op) == SYMBOL_REF
2200 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2201 && (SYMBOL_REF_LOCAL_P (op)
2202 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2203 }
2204
2205 /* Return 1 if this operand is a valid input for a move insn. */
2206
int
input_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  (CONST_DOUBLE is presumably used here
     for integers wider than a host word -- TODO confirm.)  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* Allow easy vector constants.  */
  if (GET_CODE (op) == CONST_VECTOR
      && easy_vector_constant (op, mode))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  Note this ends the cascade: the checks below apply
     only to single-word integral modes.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (legitimate_constant_pool_address_p (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid.  */
  if (toc_relative_expr_p (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  return 0;
}
2266
2267 /* Return 1 for an operand in small memory on V.4/eabi. */
2268
int
small_data_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if TARGET_ELF
  rtx sym_ref;

  /* Small data requires an -msdata mode that actually uses a small
     data base register.  */
  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  /* Only the System V.4 ABI has small data.  */
  if (DEFAULT_ABI != ABI_V4)
    return 0;

  /* OP is either a bare symbol ...  */
  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  /* ... or must be (const (plus (symbol_ref) (const_int))).  */
  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
	 that must be 32k from _SDA_BASE_, not just the symbol.  Reject
	 negative offsets and offsets beyond the -G limit.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
	return 0;

      sym_ref = XEXP (sum, 0);
    }

  return SYMBOL_REF_SMALL_P (sym_ref);
#else
  /* Non-ELF targets have no small data region.  */
  return 0;
#endif
}
2311
2312 /* Return 1 for all valid move insn operand combination involving altivec
2313 vectors in gprs. */
2314
2315 int
2316 altivec_in_gprs_p (rtx op0, rtx op1)
2317 {
2318 if (REG_P (op0) && REGNO_REG_CLASS (REGNO (op0)) == GENERAL_REGS)
2319 return 1;
2320
2321 if (REG_P (op1) && REGNO_REG_CLASS (REGNO (op1)) == GENERAL_REGS)
2322 return 1;
2323 return 0;
2324 }
2325
2326 \f
2327 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2328
2329 static int
2330 constant_pool_expr_1 (op, have_sym, have_toc)
2331 rtx op;
2332 int *have_sym;
2333 int *have_toc;
2334 {
2335 switch (GET_CODE(op))
2336 {
2337 case SYMBOL_REF:
2338 if (RS6000_SYMBOL_REF_TLS_P (op))
2339 return 0;
2340 else if (CONSTANT_POOL_ADDRESS_P (op))
2341 {
2342 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2343 {
2344 *have_sym = 1;
2345 return 1;
2346 }
2347 else
2348 return 0;
2349 }
2350 else if (! strcmp (XSTR (op, 0), toc_label_name))
2351 {
2352 *have_toc = 1;
2353 return 1;
2354 }
2355 else
2356 return 0;
2357 case PLUS:
2358 case MINUS:
2359 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2360 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2361 case CONST:
2362 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2363 case CONST_INT:
2364 return 1;
2365 default:
2366 return 0;
2367 }
2368 }
2369
2370 static bool
2371 constant_pool_expr_p (op)
2372 rtx op;
2373 {
2374 int have_sym = 0;
2375 int have_toc = 0;
2376 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2377 }
2378
2379 static bool
2380 toc_relative_expr_p (op)
2381 rtx op;
2382 {
2383 int have_sym = 0;
2384 int have_toc = 0;
2385 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2386 }
2387
2388 /* SPE offset addressing is limited to 5-bits worth of double words. */
2389 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2390
2391 bool
2392 legitimate_constant_pool_address_p (x)
2393 rtx x;
2394 {
2395 return (TARGET_TOC
2396 && GET_CODE (x) == PLUS
2397 && GET_CODE (XEXP (x, 0)) == REG
2398 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2399 && constant_pool_expr_p (XEXP (x, 1)));
2400 }
2401
2402 static bool
2403 legitimate_small_data_p (mode, x)
2404 enum machine_mode mode;
2405 rtx x;
2406 {
2407 return (DEFAULT_ABI == ABI_V4
2408 && !flag_pic && !TARGET_TOC
2409 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2410 && small_data_operand (x, mode));
2411 }
2412
static bool
legitimate_offset_address_p (mode, x, strict)
     enum machine_mode mode;
     rtx x;
     int strict;
{
  unsigned HOST_WIDE_INT offset, extra;

  /* Must be (plus (reg) (const_int)) with a valid base register.  */
  if (GET_CODE (x) != PLUS)
    return false;
  if (GET_CODE (XEXP (x, 0)) != REG)
    return false;
  if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
    return false;
  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
    return false;

  /* EXTRA is the number of bytes beyond the first word that must also
     be reachable from OFFSET, for modes accessed a word at a time.  */
  offset = INTVAL (XEXP (x, 1));
  extra = 0;
  switch (mode)
    {
    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
      /* AltiVec vector modes.  Only reg+reg addressing is valid here,
	 which leaves the only valid constant offset of zero, which by
	 canonicalization rules is also invalid.  */
      return false;

    case V4HImode:
    case V2SImode:
    case V1DImode:
    case V2SFmode:
      /* SPE vector modes.  */
      return SPE_CONST_OFFSET_OK (offset);

    case DFmode:
    case DImode:
      /* On 32-bit, the second word is accessed at offset+4; on 64-bit
	 the offset must be word aligned for the DS-form.  */
      if (TARGET_32BIT)
	extra = 4;
      else if (offset & 3)
	return false;
      break;

    case TFmode:
    case TImode:
      /* Likewise for the three extra words (32-bit) or the second
	 doubleword (64-bit).  */
      if (TARGET_32BIT)
	extra = 12;
      else if (offset & 3)
	return false;
      else
	extra = 8;
      break;

    default:
      break;
    }

  /* The whole accessed range must fit in the signed 16-bit
     displacement.  The first test rejects unsigned wraparound of
     offset+extra; the second relies on negative offsets having wrapped
     to large unsigned values.  */
  return (offset + extra >= offset) && (offset + extra + 0x8000 < 0x10000);
}
2474
2475 static bool
2476 legitimate_indexed_address_p (x, strict)
2477 rtx x;
2478 int strict;
2479 {
2480 rtx op0, op1;
2481
2482 if (GET_CODE (x) != PLUS)
2483 return false;
2484 op0 = XEXP (x, 0);
2485 op1 = XEXP (x, 1);
2486
2487 if (!REG_P (op0) || !REG_P (op1))
2488 return false;
2489
2490 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2491 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2492 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2493 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2494 }
2495
2496 static inline bool
2497 legitimate_indirect_address_p (x, strict)
2498 rtx x;
2499 int strict;
2500 {
2501 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2502 }
2503
2504 static bool
2505 legitimate_lo_sum_address_p (mode, x, strict)
2506 enum machine_mode mode;
2507 rtx x;
2508 int strict;
2509 {
2510 if (GET_CODE (x) != LO_SUM)
2511 return false;
2512 if (GET_CODE (XEXP (x, 0)) != REG)
2513 return false;
2514 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2515 return false;
2516 x = XEXP (x, 1);
2517
2518 if (TARGET_ELF)
2519 {
2520 if (DEFAULT_ABI != ABI_AIX && flag_pic)
2521 return false;
2522 if (TARGET_TOC)
2523 return false;
2524 if (GET_MODE_NUNITS (mode) != 1)
2525 return false;
2526 if (GET_MODE_BITSIZE (mode) > 32
2527 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2528 return false;
2529
2530 return CONSTANT_P (x);
2531 }
2532
2533 return false;
2534 }
2535
2536
2537 /* Try machine-dependent ways of modifying an illegitimate address
2538 to be legitimate. If we find one, return the new, valid address.
2539 This is used from only one place: `memory_address' in explow.c.
2540
2541 OLDX is the address as it was before break_out_memory_refs was
2542 called. In some cases it is useful to look at this to decide what
2543 needs to be done.
2544
2545 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2546
2547 It is always safe for this function to do nothing. It exists to
2548 recognize opportunities to optimize the output.
2549
2550 On RS/6000, first check for the sum of a register with a constant
2551 integer that is out of range. If so, generate code to add the
2552 constant with the low-order 16 bits masked to the register and force
2553 this result into another register (this can be done with `cau').
2554 Then generate an address of REG+(CONST&0xffff), allowing for the
2555 possibility of bit 16 being a one.
2556
2557 Then check for the sum of a register and something not constant, try to
2558 load the other things into a register and return the sum. */
2559
rtx
rs6000_legitimize_address (x, oldx, mode)
     rtx x;
     rtx oldx ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  /* Thread-local symbols get their own fully general expansion.  */
  if (GET_CODE (x) == SYMBOL_REF)
    {
      enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
      if (model != 0)
	return rs6000_legitimize_tls_address (x, model);
    }

  /* reg + out-of-range constant: split into a high part added to the
     register and a signed-16-bit low part left in the address.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      /* Sign-extend the low 16 bits; the high part absorbs the rest.  */
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* reg + non-constant: load the addend into a register and use
     indexed addressing, for modes where indexed form is usable.  */
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	       || TARGET_POWERPC64
	       || (mode != DFmode && mode != TFmode))
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  /* AltiVec modes allow only reg+reg or reg indirect.  */
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  else if (SPE_VECTOR_MODE (mode))
    {
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
	{
	  rtx op1 = XEXP (x, 0);
	  rtx op2 = XEXP (x, 1);

	  op1 = force_reg (Pmode, op1);

	  /* An offset must fit the SPE 5-bit double-word field;
	     otherwise force it into a register too.  */
	  if (GET_CODE (op2) != REG
	      && (GET_CODE (op2) != CONST_INT
		  || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
	    op2 = force_reg (Pmode, op2);

	  return gen_rtx_PLUS (Pmode, op1, op2);
	}

      return force_reg (Pmode, x);
    }
  /* 32-bit ELF without a TOC: materialize other constants with a
     high/lo_sum pair.  */
  else if (TARGET_ELF
	   && TARGET_32BIT
	   && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Likewise for Darwin without PIC.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
#if TARGET_MACHO
	   && ! MACHO_DYNAMIC_NO_PIC_P
#endif
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Constants that live in the TOC become TOC references.  */
  else if (TARGET_TOC
	   && constant_pool_expr_p (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    /* Nothing we can improve; tell the caller to keep the address.  */
    return NULL_RTX;
}
2672
2673 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2674
2675 static GTY(()) rtx rs6000_tls_symbol;
2676 static rtx
2677 rs6000_tls_get_addr ()
2678 {
2679 if (!rs6000_tls_symbol)
2680 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2681
2682 return rs6000_tls_symbol;
2683 }
2684
2685 /* Construct the SYMBOL_REF for TLS GOT references. */
2686
2687 static GTY(()) rtx rs6000_got_symbol;
2688 static rtx
2689 rs6000_got_sym ()
2690 {
2691 if (!rs6000_got_symbol)
2692 {
2693 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2694 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2695 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2696 }
2697
2698 return rs6000_got_symbol;
2699 }
2700
2701 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2702 this (thread-local) address. */
2703
static rtx
rs6000_legitimize_tls_address (addr, model)
     rtx addr;
     enum tls_model model;
{
  rtx dest, insn;

  dest = gen_reg_rtx (Pmode);
  /* Local-exec with 16-bit offsets: a single tprel add off the thread
     register (r13 for 64-bit, r2 for 32-bit).  */
  if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
    {
      rtx tlsreg;

      if (TARGET_64BIT)
	{
	  tlsreg = gen_rtx_REG (Pmode, 13);
	  insn = gen_tls_tprel_64 (dest, tlsreg, addr);
	}
      else
	{
	  tlsreg = gen_rtx_REG (Pmode, 2);
	  insn = gen_tls_tprel_32 (dest, tlsreg, addr);
	}
      emit_insn (insn);
    }
  /* Local-exec with 32-bit offsets: a high-adjust followed by a low
     part, again relative to the thread register.  */
  else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
    {
      rtx tlsreg, tmp;

      tmp = gen_reg_rtx (Pmode);
      if (TARGET_64BIT)
	{
	  tlsreg = gen_rtx_REG (Pmode, 13);
	  insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
	}
      else
	{
	  tlsreg = gen_rtx_REG (Pmode, 2);
	  insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
	}
      emit_insn (insn);
      if (TARGET_64BIT)
	insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
      else
	insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
      emit_insn (insn);
    }
  else
    {
      rtx r3, got, tga, tmp1, tmp2, eqv;

      /* First obtain the GOT (TOC on 64-bit) pointer in GOT.  */
      if (TARGET_64BIT)
	got = gen_rtx_REG (Pmode, TOC_REGISTER);
      else
	{
	  if (flag_pic == 1)
	    got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
	  else
	    {
	      rtx gsym = rs6000_got_sym ();
	      got = gen_reg_rtx (Pmode);
	      if (flag_pic == 0)
		rs6000_emit_move (got, gsym, Pmode);
	      else
		{
		  /* -fPIC: compute the GOT address from a local label,
		     wrapping the sequence in libcall notes so it can be
		     deleted or moved as a unit.  */
		  char buf[30];
		  static int tls_got_labelno = 0;
		  rtx tempLR, lab, tmp3, mem;
		  rtx first, last;

		  ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
		  lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
		  tempLR = gen_reg_rtx (Pmode);
		  tmp1 = gen_reg_rtx (Pmode);
		  tmp2 = gen_reg_rtx (Pmode);
		  tmp3 = gen_reg_rtx (Pmode);
		  mem = gen_rtx_MEM (Pmode, tmp1);
		  RTX_UNCHANGING_P (mem) = 1;

		  first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
							     gsym));
		  emit_move_insn (tmp1, tempLR);
		  emit_move_insn (tmp2, mem);
		  emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
		  last = emit_move_insn (got, tmp3);
		  REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
							REG_NOTES (last));
		  REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
							 REG_NOTES (first));
		  REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
							REG_NOTES (last));
		}
	    }
	}

      /* Global-dynamic: call __tls_get_addr with the GD descriptor,
	 taking the result from r3.  */
      if (model == TLS_MODEL_GLOBAL_DYNAMIC)
	{
	  r3 = gen_rtx_REG (Pmode, 3);
	  if (TARGET_64BIT)
	    insn = gen_tls_gd_64 (r3, got, addr);
	  else
	    insn = gen_tls_gd_32 (r3, got, addr);
	  start_sequence ();
	  emit_insn (insn);
	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
	  insn = get_insns ();
	  end_sequence ();
	  emit_libcall_block (insn, dest, r3, addr);
	}
      /* Local-dynamic: one __tls_get_addr call yields the module base;
	 then add the dtprel offset per rs6000_tls_size.  */
      else if (model == TLS_MODEL_LOCAL_DYNAMIC)
	{
	  r3 = gen_rtx_REG (Pmode, 3);
	  if (TARGET_64BIT)
	    insn = gen_tls_ld_64 (r3, got);
	  else
	    insn = gen_tls_ld_32 (r3, got);
	  start_sequence ();
	  emit_insn (insn);
	  tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
	  insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
	  insn = get_insns ();
	  end_sequence ();
	  tmp1 = gen_reg_rtx (Pmode);
	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
				UNSPEC_TLSLD);
	  emit_libcall_block (insn, tmp1, r3, eqv);
	  if (rs6000_tls_size == 16)
	    {
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_64 (dest, tmp1, addr);
	      else
		insn = gen_tls_dtprel_32 (dest, tmp1, addr);
	    }
	  else if (rs6000_tls_size == 32)
	    {
	      tmp2 = gen_reg_rtx (Pmode);
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
	      else
		insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
	      emit_insn (insn);
	      if (TARGET_64BIT)
		insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
	      else
		insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
	    }
	  else
	    {
	      /* Full-width offset: fetch it from the GOT.  */
	      tmp2 = gen_reg_rtx (Pmode);
	      if (TARGET_64BIT)
		insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
	      else
		insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
	      emit_insn (insn);
	      insn = gen_rtx_SET (Pmode, dest,
				  gen_rtx_PLUS (Pmode, tmp2, tmp1));
	    }
	  emit_insn (insn);
	}
      else
	{
	  /* IE, or 64 bit offset LE.  */
	  tmp2 = gen_reg_rtx (Pmode);
	  if (TARGET_64BIT)
	    insn = gen_tls_got_tprel_64 (tmp2, got, addr);
	  else
	    insn = gen_tls_got_tprel_32 (tmp2, got, addr);
	  emit_insn (insn);
	  if (TARGET_64BIT)
	    insn = gen_tls_tls_64 (dest, tmp2, addr);
	  else
	    insn = gen_tls_tls_32 (dest, tmp2, addr);
	  emit_insn (insn);
	}
    }

  return dest;
}
2888
2889 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
2890 instruction definitions. */
2891
int
rs6000_tls_symbol_ref (x, mode)
     rtx x;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  /* Simple predicate wrapper; MODE is present only to fit the
     operand-predicate calling convention.  */
  return RS6000_SYMBOL_REF_TLS_P (x);
}
2899
2900 /* Return 1 if X contains a thread-local symbol. */
2901
bool
rs6000_tls_referenced_p (x)
     rtx x;
{
  /* Walk every sub-rtx of X looking for a TLS SYMBOL_REF.  */
  return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
}
2908
2909 /* Return 1 if *X is a thread-local symbol. This is the same as
2910 rs6000_tls_symbol_ref except for the type of the unused argument. */
2911
static inline int
rs6000_tls_symbol_ref_1 (x, data)
     rtx *x;
     void *data ATTRIBUTE_UNUSED;
{
  /* Callback shape required by for_each_rtx; DATA is unused.  */
  return RS6000_SYMBOL_REF_TLS_P (*x);
}
2919
2920 /* The convention appears to be to define this wherever it is used.
2921 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2922 is now used here. */
2923 #ifndef REG_MODE_OK_FOR_BASE_P
2924 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2925 #endif
2926
2927 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2928 replace the input X, or the original X if no replacement is called for.
2929 The output parameter *WIN is 1 if the calling macro should goto WIN,
2930 0 if it should not.
2931
2932 For RS/6000, we wish to handle large displacements off a base
2933 register by splitting the addend across an addiu/addis and the mem insn.
2934 This cuts number of extra insns needed from 3 to 1.
2935
2936 On Darwin, we use this to generate code for floating point constants.
2937 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2938 The Darwin code is inside #if TARGET_MACHO because only then is
2939 machopic_function_base_name() defined. */
rtx
rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
     rtx x;
     enum machine_mode mode;
     int opnum;
     int type;
     int ind_levels ATTRIBUTE_UNUSED;
     int *win;
{
  /* We must recognize output that we have already generated ourselves:
     ((reg + high) + low) needs only its inner sum reloaded.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* reg + large constant: split the displacement so only the high
     part needs a reload, leaving a signed-16-bit low offset in the
     address.  Vector modes do not use displacement addressing.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
	{
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
	 in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
				      GEN_INT (high)),
			GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && flag_pic)
    {
      /* Darwin load of floating point constant: rewrite as a
	 picbase-relative high/lo_sum pair.  */
      rtx offset = gen_rtx (CONST, Pmode,
		    gen_rtx (MINUS, Pmode, x,
		    gen_rtx (SYMBOL_REF, Pmode,
			machopic_function_base_name ())));
      x = gen_rtx (LO_SUM, GET_MODE (x),
	    gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
		gen_rtx (HIGH, Pmode, offset)), offset);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && MACHO_DYNAMIC_NO_PIC_P)
    {
      /* Darwin load of floating point constant, non-PIC dynamic
	 case: an absolute high/lo_sum pair suffices.  */
      x = gen_rtx (LO_SUM, GET_MODE (x),
		   gen_rtx (HIGH, Pmode, x), x);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* A TOC-resident constant can be replaced by a TOC reference with
     no reload needed at all.  */
  if (TARGET_TOC
      && constant_pool_expr_p (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  /* Nothing matched; let the generic reload machinery handle it.  */
  *win = 0;
  return x;
}
3064
3065 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3066 that is a valid memory address for an instruction.
3067 The MODE argument is the machine mode for the MEM expression
3068 that wants to use this address.
3069
3070 On the RS/6000, there are four valid address: a SYMBOL_REF that
3071 refers to a constant pool entry of an address (or the sum of it
3072 plus a constant), a short (16-bit signed) constant plus a register,
3073 the sum of two registers, or a register indirect, possibly with an
3074 auto-increment. For DFmode and DImode with a constant plus register,
3075 we must ensure that both words are addressable or PowerPC64 with offset
3076 word aligned.
3077
3078 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3079 32-bit DImode, TImode), indexed addressing cannot be used because
3080 adjacent memory cells are accessed by adding word-sized offsets
3081 during assembly output. */
int
rs6000_legitimate_address (mode, x, reg_ok_strict)
     enum machine_mode mode;
     rtx x;
     int reg_ok_strict;
{
  /* TLS symbols must go through rs6000_legitimize_tls_address.  */
  if (RS6000_SYMBOL_REF_TLS_P (x))
    return 0;
  /* Register indirect.  */
  if (legitimate_indirect_address_p (x, reg_ok_strict))
    return 1;
  /* Pre-increment/decrement, for scalar modes when update insns
     exist.  */
  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
      && !ALTIVEC_VECTOR_MODE (mode)
      && !SPE_VECTOR_MODE (mode)
      && TARGET_UPDATE
      && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
    return 1;
  /* Small data region references (V.4/eabi).  */
  if (legitimate_small_data_p (mode, x))
    return 1;
  /* TOC references.  */
  if (legitimate_constant_pool_address_p (x))
    return 1;
  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
  if (! reg_ok_strict
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;
  /* Base register plus 16-bit displacement.  */
  if (legitimate_offset_address_p (mode, x, reg_ok_strict))
    return 1;
  /* Indexed (reg+reg), excluded for modes that are accessed a word at
     a time during assembly output (see the comment above).  */
  if (mode != TImode
      && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	  || TARGET_POWERPC64
	  || (mode != DFmode && mode != TFmode))
      && (TARGET_POWERPC64 || mode != DImode)
      && legitimate_indexed_address_p (x, reg_ok_strict))
    return 1;
  /* lo_sum addresses on ELF.  */
  if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
    return 1;
  return 0;
}
3122
3123 /* Go to LABEL if ADDR (a legitimate address expression)
3124 has an effect that depends on the machine mode it is used for.
3125
   On the RS/6000 this is true of all integral offsets (since AltiVec
   modes don't allow them) and of pre-increment and pre-decrement addresses.
3128
3129 ??? Except that due to conceptual problems in offsettable_address_p
3130 we can't really report the problems of integral offsets. So leave
3131 this assuming that the adjustable offset must be valid for the
3132 sub-words of a TFmode operand, which is what we had before. */
3133
3134 bool
3135 rs6000_mode_dependent_address (addr)
3136 rtx addr;
3137 {
3138 switch (GET_CODE (addr))
3139 {
3140 case PLUS:
3141 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3142 {
3143 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3144 return val + 12 + 0x8000 >= 0x10000;
3145 }
3146 break;
3147
3148 case LO_SUM:
3149 return true;
3150
3151 case PRE_INC:
3152 case PRE_DEC:
3153 return TARGET_UPDATE;
3154
3155 default:
3156 break;
3157 }
3158
3159 return false;
3160 }
3161 \f
3162 /* Try to output insns to set TARGET equal to the constant C if it can
3163 be done in less than N insns. Do all computations in MODE.
3164 Returns the place where the output has been placed if it can be
3165 done and the insns have been emitted. If it would take more than N
   insns, zero is returned and no insns are emitted.  */
3167
rtx
rs6000_emit_set_const (dest, mode, source, n)
    rtx dest, source;
    enum machine_mode mode;
    int n ATTRIBUTE_UNUSED;
{
  rtx result, insn, set;
  HOST_WIDE_INT c0, c1;

  if (mode == QImode || mode == HImode)
    {
      /* Sub-word constants always fit in a single insn.  */
      if (dest == NULL)
        dest = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
      return dest;
    }
  else if (mode == SImode)
    {
      /* Two-insn sequence: load the high 16 bits, then OR in the
         low 16 bits.  */
      result = no_new_pseudos ? dest : gen_reg_rtx (SImode);

      emit_insn (gen_rtx_SET (VOIDmode, result,
                              GEN_INT (INTVAL (source)
                                       & (~ (HOST_WIDE_INT) 0xffff))));
      emit_insn (gen_rtx_SET (VOIDmode, dest,
                              gen_rtx_IOR (SImode, result,
                                           GEN_INT (INTVAL (source) & 0xffff))));
      result = dest;
    }
  else if (mode == DImode)
    {
      /* Split the 64-bit value into a low word C0 and high word C1 and
         let rs6000_emit_set_long_const build the sequence.  */
      if (GET_CODE (source) == CONST_INT)
        {
          c0 = INTVAL (source);
          c1 = -(c0 < 0);       /* high word is the sign extension */
        }
      else if (GET_CODE (source) == CONST_DOUBLE)
        {
#if HOST_BITS_PER_WIDE_INT >= 64
          c0 = CONST_DOUBLE_LOW (source);
          c1 = -(c0 < 0);
#else
          c0 = CONST_DOUBLE_LOW (source);
          c1 = CONST_DOUBLE_HIGH (source);
#endif
        }
      else
        abort ();

      result = rs6000_emit_set_long_const (dest, c0, c1);
    }
  else
    abort ();

  /* Record the constant in a REG_EQUAL note on the last insn, unless
     that insn already sets its destination from a constant.  */
  insn = get_last_insn ();
  set = single_set (insn);
  if (! CONSTANT_P (SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, source);

  return result;
}
3228
3229 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
   fall back to a straightforward decomposition.  We do this to avoid
3231 exponential run times encountered when looking for longer sequences
3232 with rs6000_emit_set_const. */
static rtx
rs6000_emit_set_long_const (dest, c1, c2)
    rtx dest;
    HOST_WIDE_INT c1, c2;
{
  if (!TARGET_POWERPC64)
    {
      /* 32-bit target: DEST occupies a register pair; move each
         32-bit half independently.  C1 is the low word, C2 the high.  */
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
                                        DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
                                        DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      /* 64-bit target: decompose the constant into four 16-bit
         pieces, ud1 being the least significant.  */
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Constant fits in a sign-extended 16-bit immediate: one insn.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
          || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
        {
          if (ud1 & 0x8000)
            emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
          else
            emit_move_insn (dest, GEN_INT (ud1));
        }

      /* Constant fits in a sign-extended 32-bit immediate: load the
         high halfword, then OR in the low halfword.  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
               || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
        {
          if (ud2 & 0x8000)
            emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
                                           - 0x80000000));
          else
            emit_move_insn (dest, GEN_INT (ud2 << 16));
          if (ud1 != 0)
            emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
        }
      /* Bits 48..63 (ud4) are a sign extension of ud3: build the value
         shifted right 16, shift it left 16, then OR in ud1.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
               || (ud4 == 0 && ! (ud3 & 0x8000)))
        {
          if (ud3 & 0x8000)
            emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
                                           - 0x80000000));
          else
            emit_move_insn (dest, GEN_INT (ud3 << 16));

          if (ud2 != 0)
            emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
          emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
          if (ud1 != 0)
            emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
        }
      /* General case: materialize the high 32 bits, shift left 32,
         then OR in the low two halfwords.  */
      else
        {
          if (ud4 & 0x8000)
            emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
                                           - 0x80000000));
          else
            emit_move_insn (dest, GEN_INT (ud4 << 16));

          if (ud3 != 0)
            emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

          emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
          if (ud2 != 0)
            emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
                                               GEN_INT (ud2 << 16)));
          if (ud1 != 0)
            emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
        }
    }
  return dest;
}
3317
3318 /* Emit a move from SOURCE to DEST in mode MODE. */
void
rs6000_emit_move (dest, source, mode)
    rtx dest;
    rtx source;
    enum machine_mode mode;
{
  rtx operands[2];
  operands[0] = dest;
  operands[1] = source;

  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      /* FIXME.  This should never happen.  */
      /* Since it seems that it does, do the safe thing and convert
         to a CONST_INT.  */
      operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
    }
  /* A non-float CONST_DOUBLE whose value would fit a CONST_INT should
     have been a CONST_INT in the first place.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && ((CONST_DOUBLE_HIGH (operands[1]) == 0
           && CONST_DOUBLE_LOW (operands[1]) >= 0)
          || (CONST_DOUBLE_HIGH (operands[1]) == -1
              && CONST_DOUBLE_LOW (operands[1]) < 0)))
    abort ();

  /* Check if GCC is setting up a block move that will end up using FP
     registers as temporaries.  We must make sure this is acceptable.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) == MEM
      && mode == DImode
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
          || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
                                            ? 32 : MEM_ALIGN (operands[0])))
            || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
                                               ? 32
                                               : MEM_ALIGN (operands[1]))))
      && ! MEM_VOLATILE_P (operands [0])
      && ! MEM_VOLATILE_P (operands [1]))
    {
      /* Do the DImode memory-to-memory copy as two SImode moves.  */
      emit_move_insn (adjust_address (operands[0], SImode, 0),
                      adjust_address (operands[1], SImode, 0));
      emit_move_insn (adjust_address (operands[0], SImode, 4),
                      adjust_address (operands[1], SImode, 4));
      return;
    }

  if (!no_new_pseudos)
    {
      /* Prefer a zero-extending full-word load for sub-word memory
         sources; the lowpart of the new register gives back MODE.  */
      if (GET_CODE (operands[1]) == MEM && optimize > 0
          && (mode == QImode || mode == HImode || mode == SImode)
          && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
        {
          rtx reg = gen_reg_rtx (word_mode);

          emit_insn (gen_rtx_SET (word_mode, reg,
                                  gen_rtx_ZERO_EXTEND (word_mode,
                                                       operands[1])));
          operands[1] = gen_lowpart (mode, reg);
        }
      /* When storing to memory, force the source into a register.  */
      if (GET_CODE (operands[0]) != REG)
        operands[1] = force_reg (mode, operands[1]);
    }

  if (mode == SFmode && ! TARGET_POWERPC
      && TARGET_HARD_FLOAT && TARGET_FPRS
      && GET_CODE (operands[0]) == MEM)
    {
      int regnum;

      if (reload_in_progress || reload_completed)
        regnum = true_regnum (operands[1]);
      else if (GET_CODE (operands[1]) == REG)
        regnum = REGNO (operands[1]);
      else
        regnum = -1;

      /* If operands[1] is a register, on POWER it may have
         double-precision data in it, so truncate it to single
         precision.  */
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
        {
          rtx newreg;
          newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
          emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
          operands[1] = newreg;
        }
    }

  /* Recognize the case where operand[1] is a reference to thread-local
     data and load its address to a register.  */
  if (GET_CODE (operands[1]) == SYMBOL_REF)
    {
      enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
      if (model != 0)
        operands[1] = rs6000_legitimize_tls_address (operands[1], model);
    }

  /* Handle the case where reload calls us with an invalid address.  */
  if (reload_in_progress && mode == Pmode
      && (! general_operand (operands[1], mode)
          || ! nonimmediate_operand (operands[0], mode)))
    goto emit_set;

  /* Handle the case of CONSTANT_P_RTX.  */
  if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
    goto emit_set;

  /* FIXME:  In the long term, this switch statement should go away
     and be replaced by a sequence of tests based on things like
     mode == Pmode.  */
  switch (mode)
    {
    case HImode:
    case QImode:
      if (CONSTANT_P (operands[1])
          && GET_CODE (operands[1]) != CONST_INT)
        operands[1] = force_const_mem (mode, operands[1]);
      break;

    case TFmode:
    case DFmode:
    case SFmode:
      /* Hard FP constants come from the constant pool unless they
         are cheap to synthesize.  */
      if (CONSTANT_P (operands[1])
          && ! easy_fp_constant (operands[1], mode))
        operands[1] = force_const_mem (mode, operands[1]);
      break;

    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
    case V4HImode:
    case V2SFmode:
    case V2SImode:
    case V1DImode:
      if (CONSTANT_P (operands[1])
          && !easy_vector_constant (operands[1], mode))
        operands[1] = force_const_mem (mode, operands[1]);
      break;

    case SImode:
    case DImode:
      /* Use default pattern for address of ELF small data */
      if (TARGET_ELF
          && mode == Pmode
          && DEFAULT_ABI == ABI_V4
          && (GET_CODE (operands[1]) == SYMBOL_REF
              || GET_CODE (operands[1]) == CONST)
          && small_data_operand (operands[1], mode))
        {
          emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
          return;
        }

      /* Note: "mode == Pmode && mode == SImode" only succeeds when
         Pmode is SImode.  */
      if (DEFAULT_ABI == ABI_V4
          && mode == Pmode && mode == SImode
          && flag_pic == 1 && got_operand (operands[1], mode))
        {
          emit_insn (gen_movsi_got (operands[0], operands[1]));
          return;
        }

      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
          && TARGET_NO_TOC
          && ! flag_pic
          && mode == Pmode
          && CONSTANT_P (operands[1])
          && GET_CODE (operands[1]) != HIGH
          && GET_CODE (operands[1]) != CONST_INT)
        {
          rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));

          /* If this is a function address on -mcall-aixdesc,
             convert it to the address of the descriptor.  */
          if (DEFAULT_ABI == ABI_AIX
              && GET_CODE (operands[1]) == SYMBOL_REF
              && XSTR (operands[1], 0)[0] == '.')
            {
              const char *name = XSTR (operands[1], 0);
              rtx new_ref;
              while (*name == '.')
                name++;
              new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
              CONSTANT_POOL_ADDRESS_P (new_ref)
                = CONSTANT_POOL_ADDRESS_P (operands[1]);
              SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
              SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
              SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
              operands[1] = new_ref;
            }

          if (DEFAULT_ABI == ABI_DARWIN)
            {
#if TARGET_MACHO
              if (MACHO_DYNAMIC_NO_PIC_P)
                {
                  /* Take care of any required data indirection.  */
                  operands[1] = rs6000_machopic_legitimize_pic_address (
                                  operands[1], mode, operands[0]);
                  if (operands[0] != operands[1])
                    emit_insn (gen_rtx_SET (VOIDmode,
                                            operands[0], operands[1]));
                  return;
                }
#endif
              emit_insn (gen_macho_high (target, operands[1]));
              emit_insn (gen_macho_low (operands[0], target, operands[1]));
              return;
            }

          emit_insn (gen_elf_high (target, operands[1]));
          emit_insn (gen_elf_low (operands[0], target, operands[1]));
          return;
        }

      /* If this is a SYMBOL_REF that refers to a constant pool entry,
         and we have put it in the TOC, we just need to make a TOC-relative
         reference to it.  */
      if (TARGET_TOC
          && GET_CODE (operands[1]) == SYMBOL_REF
          && constant_pool_expr_p (operands[1])
          && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
                                              get_pool_mode (operands[1])))
        {
          operands[1] = create_TOC_reference (operands[1]);
        }
      else if (mode == Pmode
               && CONSTANT_P (operands[1])
               && ((GET_CODE (operands[1]) != CONST_INT
                    && ! easy_fp_constant (operands[1], mode))
                   || (GET_CODE (operands[1]) == CONST_INT
                       && num_insns_constant (operands[1], mode) > 2)
                   || (GET_CODE (operands[0]) == REG
                       && FP_REGNO_P (REGNO (operands[0]))))
               && GET_CODE (operands[1]) != HIGH
               && ! legitimate_constant_pool_address_p (operands[1])
               && ! toc_relative_expr_p (operands[1]))
        {
          /* Emit a USE operation so that the constant isn't deleted if
             expensive optimizations are turned on because nobody
             references it.  This should only be done for operands that
             contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
             This should not be done for operands that contain LABEL_REFs.
             For now, we just handle the obvious case.  */
          if (GET_CODE (operands[1]) != LABEL_REF)
            emit_insn (gen_rtx_USE (VOIDmode, operands[1]));

#if TARGET_MACHO
          /* Darwin uses a special PIC legitimizer.  */
          if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
            {
              operands[1] =
                rs6000_machopic_legitimize_pic_address (operands[1], mode,
                                                        operands[0]);
              if (operands[0] != operands[1])
                emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
              return;
            }
#endif

          /* If we are to limit the number of things we put in the TOC and
             this is a symbol plus a constant we can add in one insn,
             just put the symbol in the TOC and add the constant.  Don't do
             this if reload is in progress.  */
          if (GET_CODE (operands[1]) == CONST
              && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
              && GET_CODE (XEXP (operands[1], 0)) == PLUS
              && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
              && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
                  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
              && ! side_effects_p (operands[0]))
            {
              rtx sym =
                force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
              rtx other = XEXP (XEXP (operands[1], 0), 1);

              sym = force_reg (mode, sym);
              if (mode == SImode)
                emit_insn (gen_addsi3 (operands[0], sym, other));
              else
                emit_insn (gen_adddi3 (operands[0], sym, other));
              return;
            }

          operands[1] = force_const_mem (mode, operands[1]);

          /* If the pool entry landed in the TOC, reference it through
             the TOC register rather than through its own address.  */
          if (TARGET_TOC
              && constant_pool_expr_p (XEXP (operands[1], 0))
              && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
                    get_pool_constant (XEXP (operands[1], 0)),
                    get_pool_mode (XEXP (operands[1], 0))))
            {
              operands[1]
                = gen_rtx_MEM (mode,
                               create_TOC_reference (XEXP (operands[1], 0)));
              set_mem_alias_set (operands[1], get_TOC_alias_set ());
              RTX_UNCHANGING_P (operands[1]) = 1;
            }
        }
      break;

    case TImode:
      /* Force TImode memory addresses into a base register first.  */
      if (GET_CODE (operands[0]) == MEM
          && GET_CODE (XEXP (operands[0], 0)) != REG
          && ! reload_in_progress)
        operands[0]
          = replace_equiv_address (operands[0],
                                   copy_addr_to_reg (XEXP (operands[0], 0)));

      if (GET_CODE (operands[1]) == MEM
          && GET_CODE (XEXP (operands[1], 0)) != REG
          && ! reload_in_progress)
        operands[1]
          = replace_equiv_address (operands[1],
                                   copy_addr_to_reg (XEXP (operands[1], 0)));
      if (TARGET_POWER)
        {
          /* The TImode move here clobbers a scratch SImode register.  */
          emit_insn (gen_rtx_PARALLEL (VOIDmode,
                                       gen_rtvec (2,
                                                  gen_rtx_SET (VOIDmode,
                                                               operands[0], operands[1]),
                                                  gen_rtx_CLOBBER (VOIDmode,
                                                                   gen_rtx_SCRATCH (SImode)))));
          return;
        }
      break;

    default:
      abort ();
    }

  /* Above, we may have called force_const_mem which may have returned
     an invalid address.  If we can, fix this up; otherwise, reload will
     have to deal with it.  */
  if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
    operands[1] = validize_mem (operands[1]);

 emit_set:
  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
}
3663 \f
3664 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3665 for a call to a function whose data type is FNTYPE.
3666 For a library call, FNTYPE is 0.
3667
3668 For incoming args we set the number of arguments in the prototype large
3669 so we never return a PARALLEL. */
3670
3671 void
3672 init_cumulative_args (cum, fntype, libname, incoming, libcall)
3673 CUMULATIVE_ARGS *cum;
3674 tree fntype;
3675 rtx libname ATTRIBUTE_UNUSED;
3676 int incoming;
3677 int libcall;
3678 {
3679 static CUMULATIVE_ARGS zero_cumulative;
3680
3681 *cum = zero_cumulative;
3682 cum->words = 0;
3683 cum->fregno = FP_ARG_MIN_REG;
3684 cum->vregno = ALTIVEC_ARG_MIN_REG;
3685 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3686 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
3687 ? CALL_LIBCALL : CALL_NORMAL);
3688 cum->sysv_gregno = GP_ARG_MIN_REG;
3689 cum->stdarg = fntype
3690 && (TYPE_ARG_TYPES (fntype) != 0
3691 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3692 != void_type_node));
3693
3694 if (incoming)
3695 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
3696
3697 else if (cum->prototype)
3698 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
3699 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
3700 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
3701
3702 else
3703 cum->nargs_prototype = 0;
3704
3705 /* Check for a longcall attribute. */
3706 if (fntype
3707 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3708 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3709 cum->call_cookie = CALL_LONG;
3710
3711 if (TARGET_DEBUG_ARG)
3712 {
3713 fprintf (stderr, "\ninit_cumulative_args:");
3714 if (fntype)
3715 {
3716 tree ret_type = TREE_TYPE (fntype);
3717 fprintf (stderr, " ret code = %s,",
3718 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3719 }
3720
3721 if (cum->call_cookie & CALL_LONG)
3722 fprintf (stderr, " longcall,");
3723
3724 fprintf (stderr, " proto = %d, nargs = %d\n",
3725 cum->prototype, cum->nargs_prototype);
3726 }
3727 }
3728 \f
3729 /* If defined, a C expression which determines whether, and in which
3730 direction, to pad out an argument with extra space. The value
3731 should be of type `enum direction': either `upward' to pad above
3732 the argument, `downward' to pad below, or `none' to inhibit
3733 padding.
3734
3735 For the AIX ABI structs are always stored left shifted in their
3736 argument slot. */
3737
3738 enum direction
3739 function_arg_padding (mode, type)
3740 enum machine_mode mode;
3741 tree type;
3742 {
3743 #ifndef AGGREGATE_PADDING_FIXED
3744 #define AGGREGATE_PADDING_FIXED 0
3745 #endif
3746 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
3747 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
3748 #endif
3749
3750 if (!AGGREGATE_PADDING_FIXED)
3751 {
3752 /* GCC used to pass structures of the same size as integer types as
3753 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
3754 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
3755 passed padded downward, except that -mstrict-align further
3756 muddied the water in that multi-component structures of 2 and 4
3757 bytes in size were passed padded upward.
3758
3759 The following arranges for best compatibility with previous
3760 versions of gcc, but removes the -mstrict-align dependency. */
3761 if (BYTES_BIG_ENDIAN)
3762 {
3763 HOST_WIDE_INT size = 0;
3764
3765 if (mode == BLKmode)
3766 {
3767 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
3768 size = int_size_in_bytes (type);
3769 }
3770 else
3771 size = GET_MODE_SIZE (mode);
3772
3773 if (size == 1 || size == 2 || size == 4)
3774 return downward;
3775 }
3776 return upward;
3777 }
3778
3779 if (AGGREGATES_PAD_UPWARD_ALWAYS)
3780 {
3781 if (type != 0 && AGGREGATE_TYPE_P (type))
3782 return upward;
3783 }
3784
3785 /* This is the default definition. */
3786 return (! BYTES_BIG_ENDIAN
3787 ? upward
3788 : ((mode == BLKmode
3789 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3790 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
3791 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
3792 ? downward : upward));
3793 }
3794
3795 /* If defined, a C expression that gives the alignment boundary, in bits,
3796 of an argument with the specified mode and type. If it is not defined,
3797 PARM_BOUNDARY is used for all arguments.
3798
3799 V.4 wants long longs to be double word aligned. */
3800
3801 int
3802 function_arg_boundary (mode, type)
3803 enum machine_mode mode;
3804 tree type ATTRIBUTE_UNUSED;
3805 {
3806 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3807 return 64;
3808 else if (SPE_VECTOR_MODE (mode))
3809 return 64;
3810 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3811 return 128;
3812 else
3813 return PARM_BOUNDARY;
3814 }
3815 \f
3816 /* Update the data in CUM to advance over an argument
3817 of mode MODE and data type TYPE.
3818 (TYPE is null for libcalls where that information may not be available.) */
3819
void
function_arg_advance (cum, mode, type, named)
    CUMULATIVE_ARGS *cum;
    enum machine_mode mode;
    tree type;
    int named;
{
  /* Count down the remaining prototyped arguments; it goes negative
     once we pass the prototype's end.  */
  cum->nargs_prototype--;

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* AltiVec args use vector registers while both a register and a
         prototyped slot remain; otherwise they take stack words.  */
      if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
        cum->vregno++;
      else
        cum->words += RS6000_ARG_SIZE (mode, type);
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
           && !cum->stdarg
           && cum->sysv_gregno <= GP_ARG_MAX_REG)
    cum->sysv_gregno++;
  else if (DEFAULT_ABI == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
          && (mode == SFmode || mode == DFmode))
        {
          if (cum->fregno <= FP_ARG_V4_MAX_REG)
            cum->fregno++;
          else
            {
              /* Doubles on the stack are doubleword aligned.  */
              if (mode == DFmode)
                cum->words += cum->words & 1;
              cum->words += RS6000_ARG_SIZE (mode, type);
            }
        }
      else
        {
          int n_words;
          int gregno = cum->sysv_gregno;

          /* Aggregates and IEEE quad get passed by reference.  */
          if ((type && AGGREGATE_TYPE_P (type))
              || mode == TFmode)
            n_words = 1;
          else
            n_words = RS6000_ARG_SIZE (mode, type);

          /* Long long and SPE vectors are put in odd registers.  */
          if (n_words == 2 && (gregno & 1) == 0)
            gregno += 1;

          /* Long long and SPE vectors are not split between registers
             and stack.  */
          if (gregno + n_words - 1 > GP_ARG_MAX_REG)
            {
              /* Long long is aligned on the stack.  */
              if (n_words == 2)
                cum->words += cum->words & 1;
              cum->words += n_words;
            }

          /* Note: continuing to accumulate gregno past when we've started
             spilling to the stack indicates the fact that we've started
             spilling to the stack to expand_builtin_saveregs.  */
          cum->sysv_gregno = gregno + n_words;
        }

      if (TARGET_DEBUG_ARG)
        {
          fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
                   cum->words, cum->fregno);
          fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
                   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
          fprintf (stderr, "mode = %4s, named = %d\n",
                   GET_MODE_NAME (mode), named);
        }
    }
  else
    {
      /* AIX/Darwin style: all args consume parameter words; 64-bit
         aligned args skip an odd word first on 32-bit targets.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
                   && function_arg_boundary (mode, type) == 64) ? 1 : 0;

      cum->words += align + RS6000_ARG_SIZE (mode, type);

      /* Float args also consume FP registers (two for TFmode).  */
      if (GET_MODE_CLASS (mode) == MODE_FLOAT
          && TARGET_HARD_FLOAT && TARGET_FPRS)
        cum->fregno += (mode == TFmode ? 2 : 1);

      if (TARGET_DEBUG_ARG)
        {
          fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
                   cum->words, cum->fregno);
          fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
                   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
          fprintf (stderr, "named = %d, align = %d\n", named, align);
        }
    }
}
3917
3918 /* Determine where to put a SIMD argument on the SPE. */
3919 static rtx
3920 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type)
3921 {
3922 if (cum->stdarg)
3923 {
3924 int gregno = cum->sysv_gregno;
3925 int n_words = RS6000_ARG_SIZE (mode, type);
3926
3927 /* SPE vectors are put in odd registers. */
3928 if (n_words == 2 && (gregno & 1) == 0)
3929 gregno += 1;
3930
3931 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3932 {
3933 rtx r1, r2;
3934 enum machine_mode m = SImode;
3935
3936 r1 = gen_rtx_REG (m, gregno);
3937 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
3938 r2 = gen_rtx_REG (m, gregno + 1);
3939 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
3940 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
3941 }
3942 else
3943 return NULL;
3944 }
3945 else
3946 {
3947 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
3948 return gen_rtx_REG (mode, cum->sysv_gregno);
3949 else
3950 return NULL;
3951 }
3952 }
3953
3954 /* Determine where to put an argument to a function.
3955 Value is zero to push the argument on the stack,
3956 or a hard register in which to store the argument.
3957
3958 MODE is the argument's machine mode.
3959 TYPE is the data type of the argument (as a tree).
3960 This is null for libcalls where that information may
3961 not be available.
3962 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3963 the preceding args and about the function being called.
3964 NAMED is nonzero if this argument is a named parameter
3965 (otherwise it is an extra parameter matching an ellipsis).
3966
3967 On RS/6000 the first eight words of non-FP are normally in registers
3968 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3969 Under V.4, the first 8 FP args are in registers.
3970
3971 If this is floating-point and no prototype is specified, we use
3972 both an FP and integer register (or possibly FP reg and stack). Library
3973 functions (when CALL_LIBCALL is set) always have the proper types for args,
3974 so we can pass the FP value just in one register. emit_library_function
3975 doesn't support PARALLEL anyway. */
3976
struct rtx_def *
function_arg (cum, mode, type, named)
    CUMULATIVE_ARGS *cum;
    enum machine_mode mode;
    tree type;
    int named;
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
	  && cum->nargs_prototype < 0
	  && (cum->call_cookie & CALL_LIBCALL) == 0
	  && (cum->prototype || TARGET_NO_PROTOTYPE))
	{
	  /* For the SPE, we need to crxor CR6 always.  */
	  if (TARGET_SPE_ABI)
	    return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
	  else if (TARGET_HARD_FLOAT && TARGET_FPRS)
	    return GEN_INT (cum->call_cookie
			    | ((cum->fregno == FP_ARG_MIN_REG)
			       ? CALL_V4_SET_FP_ARGS
			       : CALL_V4_CLEAR_FP_ARGS));
	}

      return GEN_INT (cum->call_cookie);
    }

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* AltiVec args go in vector registers only when named.  */
      if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->vregno);
      else
	return NULL;
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
    return rs6000_spe_function_arg (cum, mode, type);
  else if (abi == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    return gen_rtx_REG (mode, cum->fregno);
	  else
	    return NULL;
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long and SPE vectors are put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long do not split between registers and stack.  */
	  if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
	    return gen_rtx_REG (mode, gregno);
	  else
	    return NULL;
	}
    }
  else
    {
      /* AIX/Darwin conventions: compute the argument's word position,
	 doubleword aligning it first on 32-bit targets if required.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;
      int align_words = cum->words + align;

      /* Variable-sized types are passed on the stack.  */
      if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return NULL_RTX;

      if (USE_FP_FOR_ARG_P (*cum, mode, type))
	{
	  if (! type
	      || ((cum->nargs_prototype > 0)
	          /* IBM AIX extended its linkage convention definition always
		     to require FP args after register save area hole on the
		     stack.  */
	          && (DEFAULT_ABI != ABI_AIX
		      || ! TARGET_XL_CALL
		      || (align_words < GP_ARG_NUM_REG))))
	    return gen_rtx_REG (mode, cum->fregno);

	  /* Unprototyped FP arg: describe it in both the FP register
	     and its GPR/stack position, so the caller can comply with
	     either convention.  */
          return gen_rtx_PARALLEL (mode,
	    gen_rtvec (2,
		       gen_rtx_EXPR_LIST (VOIDmode,
				((align_words >= GP_ARG_NUM_REG)
				 ? NULL_RTX
				 : (align_words
				    + RS6000_ARG_SIZE (mode, type)
				    > GP_ARG_NUM_REG
				    /* If this is partially on the stack, then
				       we only include the portion actually
				       in registers here.  */
				    ? gen_rtx_REG (SImode,
					       GP_ARG_MIN_REG + align_words)
				    : gen_rtx_REG (mode,
					       GP_ARG_MIN_REG + align_words))),
				const0_rtx),
		       gen_rtx_EXPR_LIST (VOIDmode,
				gen_rtx_REG (mode, cum->fregno),
				const0_rtx)));
	}
      else if (align_words < GP_ARG_NUM_REG)
	return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
      else
	return NULL_RTX;
    }
}
4099 \f
4100 /* For an arg passed partly in registers and partly in memory,
4101 this is the number of registers used.
4102 For args passed entirely in registers or entirely in memory, zero. */
4103
4104 int
4105 function_arg_partial_nregs (cum, mode, type, named)
4106 CUMULATIVE_ARGS *cum;
4107 enum machine_mode mode;
4108 tree type;
4109 int named ATTRIBUTE_UNUSED;
4110 {
4111 if (DEFAULT_ABI == ABI_V4)
4112 return 0;
4113
4114 if (USE_FP_FOR_ARG_P (*cum, mode, type)
4115 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
4116 {
4117 if (cum->nargs_prototype >= 0)
4118 return 0;
4119 }
4120
4121 if (cum->words < GP_ARG_NUM_REG
4122 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
4123 {
4124 int ret = GP_ARG_NUM_REG - cum->words;
4125 if (ret && TARGET_DEBUG_ARG)
4126 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
4127
4128 return ret;
4129 }
4130
4131 return 0;
4132 }
4133 \f
4134 /* A C expression that indicates when an argument must be passed by
4135 reference. If nonzero for an argument, a copy of that argument is
4136 made in memory and a pointer to the argument is passed instead of
4137 the argument itself. The pointer is passed in whatever way is
4138 appropriate for passing a pointer to that type.
4139
4140 Under V.4, structures and unions are passed by reference.
4141
4142 As an extension to all ABIs, variable sized types are passed by
4143 reference. */
4144
4145 int
4146 function_arg_pass_by_reference (cum, mode, type, named)
4147 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
4148 enum machine_mode mode ATTRIBUTE_UNUSED;
4149 tree type;
4150 int named ATTRIBUTE_UNUSED;
4151 {
4152 if (DEFAULT_ABI == ABI_V4
4153 && ((type && AGGREGATE_TYPE_P (type))
4154 || mode == TFmode))
4155 {
4156 if (TARGET_DEBUG_ARG)
4157 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
4158
4159 return 1;
4160 }
4161 return type && int_size_in_bytes (type) <= 0;
4162 }
4163 \f
4164 /* Perform any needed actions needed for a function that is receiving a
4165 variable number of arguments.
4166
4167 CUM is as above.
4168
4169 MODE and TYPE are the mode and type of the current parameter.
4170
4171 PRETEND_SIZE is a variable that should be set to the amount of stack
4172 that must be pushed by the prolog to pretend that our caller pushed
4173 it.
4174
4175 Normally, this macro will push all remaining incoming registers on the
4176 stack and set PRETEND_SIZE to the length of the registers pushed. */
4177
4178 void
4179 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
4180 CUMULATIVE_ARGS *cum;
4181 enum machine_mode mode;
4182 tree type;
4183 int *pretend_size ATTRIBUTE_UNUSED;
4184 int no_rtl;
4185
4186 {
4187 CUMULATIVE_ARGS next_cum;
4188 int reg_size = TARGET_32BIT ? 4 : 8;
4189 rtx save_area = NULL_RTX, mem;
4190 int first_reg_offset, set;
4191 tree fntype;
4192 int stdarg_p;
4193
4194 fntype = TREE_TYPE (current_function_decl);
4195 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
4196 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4197 != void_type_node));
4198
4199 /* For varargs, we do not want to skip the dummy va_dcl argument.
4200 For stdargs, we do want to skip the last named argument. */
4201 next_cum = *cum;
4202 if (stdarg_p)
4203 function_arg_advance (&next_cum, mode, type, 1);
4204
4205 if (DEFAULT_ABI == ABI_V4)
4206 {
4207 /* Indicate to allocate space on the stack for varargs save area. */
4208 cfun->machine->sysv_varargs_p = 1;
4209 if (! no_rtl)
4210 save_area = plus_constant (virtual_stack_vars_rtx,
4211 - RS6000_VARARGS_SIZE);
4212
4213 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4214 }
4215 else
4216 {
4217 first_reg_offset = next_cum.words;
4218 save_area = virtual_incoming_args_rtx;
4219 cfun->machine->sysv_varargs_p = 0;
4220
4221 if (MUST_PASS_IN_STACK (mode, type))
4222 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
4223 }
4224
4225 set = get_varargs_alias_set ();
4226 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4227 {
4228 mem = gen_rtx_MEM (BLKmode,
4229 plus_constant (save_area,
4230 first_reg_offset * reg_size)),
4231 set_mem_alias_set (mem, set);
4232 set_mem_align (mem, BITS_PER_WORD);
4233
4234 move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4235 GP_ARG_NUM_REG - first_reg_offset);
4236 }
4237
4238 /* Save FP registers if needed. */
4239 if (DEFAULT_ABI == ABI_V4
4240 && TARGET_HARD_FLOAT && TARGET_FPRS
4241 && ! no_rtl
4242 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4243 {
4244 int fregno = next_cum.fregno;
4245 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4246 rtx lab = gen_label_rtx ();
4247 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4248
4249 emit_jump_insn (gen_rtx_SET (VOIDmode,
4250 pc_rtx,
4251 gen_rtx_IF_THEN_ELSE (VOIDmode,
4252 gen_rtx_NE (VOIDmode, cr1,
4253 const0_rtx),
4254 gen_rtx_LABEL_REF (VOIDmode, lab),
4255 pc_rtx)));
4256
4257 while (fregno <= FP_ARG_V4_MAX_REG)
4258 {
4259 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4260 set_mem_alias_set (mem, set);
4261 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4262 fregno++;
4263 off += 8;
4264 }
4265
4266 emit_label (lab);
4267 }
4268 }
4269
4270 /* Create the va_list data type. */
4271
tree
rs6000_build_va_list ()
{
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;

  /* For AIX, prefer 'char *' because that's what the system
     header files like.  */
  if (DEFAULT_ABI != ABI_V4)
    return build_pointer_type (char_type_node);

  /* V.4 uses a record:
       { char gpr; char fpr; short reserved;
	 void *overflow_arg_area; void *reg_save_area; }  */
  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  /* Counters of GP and FP argument registers consumed so far.  */
  f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"), 
		      unsigned_char_type_node);
  f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"), 
		      unsigned_char_type_node);
  /* Give the two bytes of padding a name, so that -Wpadded won't warn on
     every user file.  */
  f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
		      short_unsigned_type_node);
  f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
		      ptr_type_node);
  f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
		      ptr_type_node);

  DECL_FIELD_CONTEXT (f_gpr) = record;
  DECL_FIELD_CONTEXT (f_fpr) = record;
  DECL_FIELD_CONTEXT (f_res) = record;
  DECL_FIELD_CONTEXT (f_ovf) = record;
  DECL_FIELD_CONTEXT (f_sav) = record;

  /* Chain the fields in declaration order and lay out the record.  */
  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_gpr;
  TREE_CHAIN (f_gpr) = f_fpr;
  TREE_CHAIN (f_fpr) = f_res;
  TREE_CHAIN (f_res) = f_ovf;
  TREE_CHAIN (f_ovf) = f_sav;

  layout_type (record);

  /* The correct type is an array type of one element.  */
  return build_array_type (record, build_index_type (size_zero_node));
}
4317
4318 /* Implement va_start. */
4319
void
rs6000_va_start (valist, nextarg)
     tree valist;
     rtx nextarg;
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (valist, nextarg);
      return;
    }

  /* Pick apart the fields of the record built by rs6000_build_va_list,
     in declaration order.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_res = TREE_CHAIN (f_fpr);
  f_ovf = TREE_CHAIN (f_res);
  f_sav = TREE_CHAIN (f_ovf);

  /* Build lvalues for each field of the user's va_list object.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
  n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;

  if (TARGET_DEBUG_ARG)
    fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
	     HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
	     words, n_gpr, n_fpr);

  /* Initialize the two register counters.  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area: the incoming args pointer, advanced past
     the words consumed by named arguments.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
	       build_int_2 (words * UNITS_PER_WORD, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area: RS6000_VARARGS_SIZE bytes below the
     virtual stack vars pointer (matching setup_incoming_varargs).  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-RS6000_VARARGS_SIZE, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
4383
4384 /* Implement va_arg. */
4385
rtx
rs6000_va_arg (valist, type)
     tree valist, type;
{
  tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;

  /* Non-V.4 ABIs use the standard expansion, except that variable
     sized types arrive as a pointer to be dereferenced.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      /* Variable sized types are passed by reference.  */
      if (int_size_in_bytes (type) <= 0)
	{
	  u = build_pointer_type (type);

	  /* Args grow upward.  */
	  t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist, 
		     build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
	  TREE_SIDE_EFFECTS (t) = 1;

	  t = build1 (NOP_EXPR, build_pointer_type (u), t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  t = build1 (INDIRECT_REF, u, t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
	}
      else
	return std_expand_builtin_va_arg (valist, type);
    }

  /* Pick apart the va_list record fields, in declaration order.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_res = TREE_CHAIN (f_fpr);
  f_ovf = TREE_CHAIN (f_res);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Classify the argument: which register counter to use (REG), how
     many registers it consumes (N_REG), and where its slots lie in
     the save area (SAV_OFS + counter * SAV_SCALE).  */
  if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
    {
      /* Aggregates and long doubles are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      size = UNITS_PER_WORD;
      rsize = 1;
    }
  else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
    {
      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 8*4;
      sav_scale = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* AltiVec vectors never go in registers.  */
  if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
    {
      /* Jump to LAB_FALSE (overflow area) when too few registers
	 remain: counter >= 8 - n_reg + 1.  */
      TREE_THIS_VOLATILE (reg) = 1;
      emit_cmp_and_jump_insns
	(expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 lab_false);

      /* Long long is aligned in the registers.  */
      if (n_reg > 1)
	{
	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     build_int_2 (n_reg - 1, 0));
	  u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
	  u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
	  TREE_SIDE_EFFECTS (u) = 1;
	  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
	}

      if (sav_ofs)
	t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
      else
	t = sav;

      /* Compute the slot address and bump the counter by N_REG:
	 addr = t + (counter++ * sav_scale).  */
      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      if (r != addr_rtx)
	emit_move_insn (addr_rtx, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
    }

  emit_label (lab_false);

  /* ... otherwise out of the overflow area.  */

  /* Make sure we don't find reg 7 for the next int arg.

     All AltiVec vectors go in the overflow area.  So in the AltiVec
     case we need to get the vectors from the overflow area, but
     remember where the GPRs and FPRs are.  */
  if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
		    || !TARGET_ALTIVEC))
    {
      t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Care for on-stack alignment if needed.  */
  if (rsize <= 1)
    t = ovf;
  else
    {
      int align;

      /* AltiVec vectors are 16 byte aligned.  */
      if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
	align = 15;
      else
	align = 7;

      /* Round OVF up: (ovf + align) & ~align.  */
      t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
    }
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Advance the overflow pointer past this argument.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* For by-reference arguments, ADDR_RTX holds the address of a
     pointer; load through it to get the argument's address.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
4571
4572 /* Builtins. */
4573
/* Register builtin NAME with function type TYPE and builtin code CODE,
   but only when the target flag bits in MASK are currently enabled
   (e.g. MASK_ALTIVEC).  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
do {								\
  if ((MASK) & target_flags)					\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
		      NULL, NULL_TREE);				\
} while (0)
4580
4581 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
4582
/* Each entry maps a target mask (see def_builtin) and an insn pattern
   to the builtin function code used to expand it.  */
static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP }, 
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
4609
4610 /* DST operations: void foo (void *, const int, const char). */
4611
/* Data-stream touch variants; same entry layout as bdesc_3arg.  */
static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
4619
4620 /* Simple binary operations: VECc = foo (VECa, VECb). */
4621
4622 static struct builtin_description bdesc_2arg[] =
4623 {
4624 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
4625 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
4626 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
4627 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
4628 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
4629 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
4630 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
4631 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
4632 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
4633 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
4634 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
4635 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
4636 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
4637 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
4638 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
4639 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
4640 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
4641 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
4642 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
4643 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
4644 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
4645 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
4646 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
4647 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
4648 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
4649 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
4650 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
4651 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
4652 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
4653 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
4654 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
4655 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
4656 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
4657 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
4658 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
4659 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
4660 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
4661 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
4662 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
4663 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
4664 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
4665 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
4666 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
4667 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
4668 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
4669 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
4670 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
4671 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
4672 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
4673 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
4674 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
4675 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
4676 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
4677 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
4678 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
4679 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
4680 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
4681 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
4682 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
4683 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
4684 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
4685 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
4686 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
4687 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
4688 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
4689 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
4690 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
4691 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
4692 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
4693 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
4694 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
4695 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
4696 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
4697 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
4698 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
4699 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
4700 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
4701 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
4702 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
4703 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
4704 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
4705 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
4706 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
4707 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
4708 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
4709 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
4710 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
4711 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
4712 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
4713 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
4714 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
4715 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
4716 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
4717 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
4718 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
4719 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
4720 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
4721 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
4722 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
4723 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
4724 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
4725 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
4726 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
4727 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
4728 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
4729 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
4730 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
4731 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
4732 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
4733 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
4734 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
4735 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
4736 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
4737
4738 /* Place holder, leave as first spe builtin. */
4739 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
4740 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
4741 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
4742 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
4743 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
4744 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
4745 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
4746 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
4747 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
4748 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
4749 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
4750 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
4751 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
4752 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
4753 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
4754 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
4755 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
4756 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
4757 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
4758 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
4759 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
4760 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
4761 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
4762 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
4763 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
4764 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
4765 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
4766 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
4767 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
4768 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
4769 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
4770 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
4771 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
4772 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
4773 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
4774 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
4775 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
4776 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
4777 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
4778 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
4779 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
4780 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
4781 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
4782 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
4783 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
4784 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
4785 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
4786 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
4787 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
4788 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
4789 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
4790 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
4791 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
4792 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
4793 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
4794 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
4795 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
4796 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
4797 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
4798 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
4799 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
4800 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
4801 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
4802 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
4803 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
4804 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
4805 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
4806 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
4807 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
4808 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
4809 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
4810 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
4811 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
4812 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
4813 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
4814 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
4815 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
4816 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
4817 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
4818 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
4819 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
4820 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
4821 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
4822 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
4823 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
4824 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
4825 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
4826 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
4827 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
4828 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
4829 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
4830 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
4831 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
4832 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
4833 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
4834 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
4835 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
4836 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
4837 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
4838 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
4839 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
4840 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
4841 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
4842 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
4843 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
4844 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
4845 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
4846 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
4847 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
4848
4849 /* SPE binary operations expecting a 5-bit unsigned literal. */
4850 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
4851
4852 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
4853 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
4854 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
4855 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
4856 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
4857 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
4858 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
4859 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
4860 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
4861 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
4862 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
4863 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
4864 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
4865 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
4866 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
4867 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
4868 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
4869 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
4870 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
4871 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
4872 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
4873 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
4874 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
4875 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
4876 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
4877 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
4878
4879 /* Place-holder. Leave as last binary SPE builtin. */
4880 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
4881 };
4882
/* AltiVec predicates.  */

/* Describes one AltiVec predicate builtin: which -m mask must be
   enabled, the comparison insn to emit, the assembler opcode string
   passed through to the insn, the user-visible builtin name, and the
   builtin function code.  */
struct builtin_description_predicates
{
  const unsigned int mask;		/* Target flags (MASK_*) required.  */
  const enum insn_code icode;		/* Insn implementing the compare.  */
  const char *opcode;			/* Opcode string fed to the insn.  */
  const char *const name;		/* __builtin_altivec_* name.  */
  const enum rs6000_builtins code;	/* Builtin function code.  */
};
4893
/* AltiVec vector-compare builtins that also set a predicate in CR6;
   expanded by altivec_expand_predicate_builtin, which passes the
   opcode string through to the predicate insn.  */
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
4910
/* SPE predicates.  */
/* NOTE: the first and last entries are used as range markers (see the
   place-holder comments below), so the ordering of this table matters;
   keep new entries strictly between them.  */
static struct builtin_description bdesc_spe_predicates[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
};
4928
/* SPE evsel predicates.  */
/* NOTE: as with bdesc_spe_predicates, the first and last entries act
   as range markers; keep new entries between them.  */
static struct builtin_description bdesc_spe_evsel[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
};
4946
/* ABS* operations.  */

/* AltiVec absolute-value builtins; presumably expanded via
   altivec_expand_abs_builtin, whose patterns take two scratch
   operands -- TODO confirm against the expander's callers.  */
static const struct builtin_description bdesc_abs[] =
{
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
4959
/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
   foo (VECa).  Expanded one operand at a time; see
   rs6000_expand_unop_builtin.  */

static struct builtin_description bdesc_1arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },

  /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
     end with SPE_BUILTIN_EVSUBFUSIAAW.  */
  { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
  { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
  { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
  { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
  { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
  { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
  { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
  { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
  { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
  { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
  { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
  { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
  { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
  { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
  { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
  { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
  { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
  { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
  { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
  { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
  { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
  { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
  { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
  { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
  { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
  { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
  { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
  { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
  { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
  { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },

  /* Place-holder.  Leave as last unary SPE builtin.  */
  { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
};
5019
5020 static rtx
5021 rs6000_expand_unop_builtin (icode, arglist, target)
5022 enum insn_code icode;
5023 tree arglist;
5024 rtx target;
5025 {
5026 rtx pat;
5027 tree arg0 = TREE_VALUE (arglist);
5028 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5029 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5030 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5031
5032 if (icode == CODE_FOR_nothing)
5033 /* Builtin not supported on this processor. */
5034 return 0;
5035
5036 /* If we got invalid arguments bail out before generating bad rtl. */
5037 if (arg0 == error_mark_node)
5038 return const0_rtx;
5039
5040 if (icode == CODE_FOR_altivec_vspltisb
5041 || icode == CODE_FOR_altivec_vspltish
5042 || icode == CODE_FOR_altivec_vspltisw
5043 || icode == CODE_FOR_spe_evsplatfi
5044 || icode == CODE_FOR_spe_evsplati)
5045 {
5046 /* Only allow 5-bit *signed* literals. */
5047 if (GET_CODE (op0) != CONST_INT
5048 || INTVAL (op0) > 0x1f
5049 || INTVAL (op0) < -0x1f)
5050 {
5051 error ("argument 1 must be a 5-bit signed literal");
5052 return const0_rtx;
5053 }
5054 }
5055
5056 if (target == 0
5057 || GET_MODE (target) != tmode
5058 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5059 target = gen_reg_rtx (tmode);
5060
5061 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5062 op0 = copy_to_mode_reg (mode0, op0);
5063
5064 pat = GEN_FCN (icode) (target, op0);
5065 if (! pat)
5066 return 0;
5067 emit_insn (pat);
5068
5069 return target;
5070 }
5071
5072 static rtx
5073 altivec_expand_abs_builtin (icode, arglist, target)
5074 enum insn_code icode;
5075 tree arglist;
5076 rtx target;
5077 {
5078 rtx pat, scratch1, scratch2;
5079 tree arg0 = TREE_VALUE (arglist);
5080 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5081 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5082 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5083
5084 /* If we have invalid arguments, bail out before generating bad rtl. */
5085 if (arg0 == error_mark_node)
5086 return const0_rtx;
5087
5088 if (target == 0
5089 || GET_MODE (target) != tmode
5090 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5091 target = gen_reg_rtx (tmode);
5092
5093 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5094 op0 = copy_to_mode_reg (mode0, op0);
5095
5096 scratch1 = gen_reg_rtx (mode0);
5097 scratch2 = gen_reg_rtx (mode0);
5098
5099 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
5100 if (! pat)
5101 return 0;
5102 emit_insn (pat);
5103
5104 return target;
5105 }
5106
5107 static rtx
5108 rs6000_expand_binop_builtin (icode, arglist, target)
5109 enum insn_code icode;
5110 tree arglist;
5111 rtx target;
5112 {
5113 rtx pat;
5114 tree arg0 = TREE_VALUE (arglist);
5115 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5116 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5117 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5118 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5119 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5120 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5121
5122 if (icode == CODE_FOR_nothing)
5123 /* Builtin not supported on this processor. */
5124 return 0;
5125
5126 /* If we got invalid arguments bail out before generating bad rtl. */
5127 if (arg0 == error_mark_node || arg1 == error_mark_node)
5128 return const0_rtx;
5129
5130 if (icode == CODE_FOR_altivec_vcfux
5131 || icode == CODE_FOR_altivec_vcfsx
5132 || icode == CODE_FOR_altivec_vctsxs
5133 || icode == CODE_FOR_altivec_vctuxs
5134 || icode == CODE_FOR_altivec_vspltb
5135 || icode == CODE_FOR_altivec_vsplth
5136 || icode == CODE_FOR_altivec_vspltw
5137 || icode == CODE_FOR_spe_evaddiw
5138 || icode == CODE_FOR_spe_evldd
5139 || icode == CODE_FOR_spe_evldh
5140 || icode == CODE_FOR_spe_evldw
5141 || icode == CODE_FOR_spe_evlhhesplat
5142 || icode == CODE_FOR_spe_evlhhossplat
5143 || icode == CODE_FOR_spe_evlhhousplat
5144 || icode == CODE_FOR_spe_evlwhe
5145 || icode == CODE_FOR_spe_evlwhos
5146 || icode == CODE_FOR_spe_evlwhou
5147 || icode == CODE_FOR_spe_evlwhsplat
5148 || icode == CODE_FOR_spe_evlwwsplat
5149 || icode == CODE_FOR_spe_evrlwi
5150 || icode == CODE_FOR_spe_evslwi
5151 || icode == CODE_FOR_spe_evsrwis
5152 || icode == CODE_FOR_spe_evsubifw
5153 || icode == CODE_FOR_spe_evsrwiu)
5154 {
5155 /* Only allow 5-bit unsigned literals. */
5156 if (TREE_CODE (arg1) != INTEGER_CST
5157 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5158 {
5159 error ("argument 2 must be a 5-bit unsigned literal");
5160 return const0_rtx;
5161 }
5162 }
5163
5164 if (target == 0
5165 || GET_MODE (target) != tmode
5166 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5167 target = gen_reg_rtx (tmode);
5168
5169 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5170 op0 = copy_to_mode_reg (mode0, op0);
5171 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5172 op1 = copy_to_mode_reg (mode1, op1);
5173
5174 pat = GEN_FCN (icode) (target, op0, op1);
5175 if (! pat)
5176 return 0;
5177 emit_insn (pat);
5178
5179 return target;
5180 }
5181
/* Expand an AltiVec predicate builtin.  The first element of ARGLIST
   selects which CR6 test to apply to the comparison result (0-3, see
   the switch below); the remaining two elements are the vectors to
   compare.  OPCODE is the assembler opcode string, passed to the insn
   as a SYMBOL_REF operand.  Returns the SImode result in TARGET (or a
   fresh register), or const0_rtx after diagnosing bad arguments.  */
static rtx
altivec_expand_predicate_builtin (icode, opcode, arglist, target)
     enum insn_code icode;
     const char *opcode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch;
  tree cr6_form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  /* The user-visible result is a plain int.  */
  enum machine_mode tmode = SImode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int cr6_form_int;

  /* The CR6 selector must be a compile-time constant.  */
  if (TREE_CODE (cr6_form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_altivec_predicate must be a constant");
      return const0_rtx;
    }
  else
    cr6_form_int = TREE_INT_CST_LOW (cr6_form);

  /* The predicate patterns compare like with like.  */
  if (mode0 != mode1)
    abort ();

  /* If we have invalid arguments, bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The vector comparison result itself is discarded; only the CR6
     bits it sets are read back below.  */
  scratch = gen_reg_rtx (mode0);

  pat = GEN_FCN (icode) (scratch, op0, op1,
			 gen_rtx (SYMBOL_REF, Pmode, opcode));
  if (! pat)
    return 0;
  emit_insn (pat);

  /* The vec_any* and vec_all* predicates use the same opcodes for two
     different operations, but the bits in CR6 will be different
     depending on what information we want.  So we have to play tricks
     with CR6 to get the right bits out.

     If you think this is disgusting, look at the specs for the
     AltiVec predicates.  */

  switch (cr6_form_int)
    {
    case 0:
      emit_insn (gen_cr6_test_for_zero (target));
      break;
    case 1:
      emit_insn (gen_cr6_test_for_zero_reverse (target));
      break;
    case 2:
      emit_insn (gen_cr6_test_for_lt (target));
      break;
    case 3:
      emit_insn (gen_cr6_test_for_lt_reverse (target));
      break;
    default:
      error ("argument 1 of __builtin_altivec_predicate is out of range");
      break;
    }

  return target;
}
5262
5263 static rtx
5264 altivec_expand_stv_builtin (icode, arglist)
5265 enum insn_code icode;
5266 tree arglist;
5267 {
5268 tree arg0 = TREE_VALUE (arglist);
5269 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5270 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5271 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5272 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5273 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5274 rtx pat;
5275 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
5276 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
5277 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
5278
5279 /* Invalid arguments. Bail before doing anything stoopid! */
5280 if (arg0 == error_mark_node
5281 || arg1 == error_mark_node
5282 || arg2 == error_mark_node)
5283 return const0_rtx;
5284
5285 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
5286 op0 = copy_to_mode_reg (mode2, op0);
5287 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
5288 op1 = copy_to_mode_reg (mode0, op1);
5289 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5290 op2 = copy_to_mode_reg (mode1, op2);
5291
5292 pat = GEN_FCN (icode) (op1, op2, op0);
5293 if (pat)
5294 emit_insn (pat);
5295 return NULL_RTX;
5296 }
5297
5298 static rtx
5299 rs6000_expand_ternop_builtin (icode, arglist, target)
5300 enum insn_code icode;
5301 tree arglist;
5302 rtx target;
5303 {
5304 rtx pat;
5305 tree arg0 = TREE_VALUE (arglist);
5306 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5307 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5308 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5309 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5310 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5311 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5312 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5313 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5314 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5315
5316 if (icode == CODE_FOR_nothing)
5317 /* Builtin not supported on this processor. */
5318 return 0;
5319
5320 /* If we got invalid arguments bail out before generating bad rtl. */
5321 if (arg0 == error_mark_node
5322 || arg1 == error_mark_node
5323 || arg2 == error_mark_node)
5324 return const0_rtx;
5325
5326 if (icode == CODE_FOR_altivec_vsldoi_4sf
5327 || icode == CODE_FOR_altivec_vsldoi_4si
5328 || icode == CODE_FOR_altivec_vsldoi_8hi
5329 || icode == CODE_FOR_altivec_vsldoi_16qi)
5330 {
5331 /* Only allow 4-bit unsigned literals. */
5332 if (TREE_CODE (arg2) != INTEGER_CST
5333 || TREE_INT_CST_LOW (arg2) & ~0xf)
5334 {
5335 error ("argument 3 must be a 4-bit unsigned literal");
5336 return const0_rtx;
5337 }
5338 }
5339
5340 if (target == 0
5341 || GET_MODE (target) != tmode
5342 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5343 target = gen_reg_rtx (tmode);
5344
5345 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5346 op0 = copy_to_mode_reg (mode0, op0);
5347 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5348 op1 = copy_to_mode_reg (mode1, op1);
5349 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5350 op2 = copy_to_mode_reg (mode2, op2);
5351
5352 pat = GEN_FCN (icode) (target, op0, op1, op2);
5353 if (! pat)
5354 return 0;
5355 emit_insn (pat);
5356
5357 return target;
5358 }
5359
5360 /* Expand the lvx builtins. */
5361 static rtx
5362 altivec_expand_ld_builtin (exp, target, expandedp)
5363 tree exp;
5364 rtx target;
5365 bool *expandedp;
5366 {
5367 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5368 tree arglist = TREE_OPERAND (exp, 1);
5369 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5370 tree arg0;
5371 enum machine_mode tmode, mode0;
5372 rtx pat, op0;
5373 enum insn_code icode;
5374
5375 switch (fcode)
5376 {
5377 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5378 icode = CODE_FOR_altivec_lvx_16qi;
5379 break;
5380 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5381 icode = CODE_FOR_altivec_lvx_8hi;
5382 break;
5383 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5384 icode = CODE_FOR_altivec_lvx_4si;
5385 break;
5386 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5387 icode = CODE_FOR_altivec_lvx_4sf;
5388 break;
5389 default:
5390 *expandedp = false;
5391 return NULL_RTX;
5392 }
5393
5394 *expandedp = true;
5395
5396 arg0 = TREE_VALUE (arglist);
5397 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5398 tmode = insn_data[icode].operand[0].mode;
5399 mode0 = insn_data[icode].operand[1].mode;
5400
5401 if (target == 0
5402 || GET_MODE (target) != tmode
5403 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5404 target = gen_reg_rtx (tmode);
5405
5406 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5407 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5408
5409 pat = GEN_FCN (icode) (target, op0);
5410 if (! pat)
5411 return 0;
5412 emit_insn (pat);
5413 return target;
5414 }
5415
5416 /* Expand the stvx builtins. */
5417 static rtx
5418 altivec_expand_st_builtin (exp, target, expandedp)
5419 tree exp;
5420 rtx target ATTRIBUTE_UNUSED;
5421 bool *expandedp;
5422 {
5423 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5424 tree arglist = TREE_OPERAND (exp, 1);
5425 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5426 tree arg0, arg1;
5427 enum machine_mode mode0, mode1;
5428 rtx pat, op0, op1;
5429 enum insn_code icode;
5430
5431 switch (fcode)
5432 {
5433 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
5434 icode = CODE_FOR_altivec_stvx_16qi;
5435 break;
5436 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
5437 icode = CODE_FOR_altivec_stvx_8hi;
5438 break;
5439 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
5440 icode = CODE_FOR_altivec_stvx_4si;
5441 break;
5442 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
5443 icode = CODE_FOR_altivec_stvx_4sf;
5444 break;
5445 default:
5446 *expandedp = false;
5447 return NULL_RTX;
5448 }
5449
5450 arg0 = TREE_VALUE (arglist);
5451 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5452 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5453 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5454 mode0 = insn_data[icode].operand[0].mode;
5455 mode1 = insn_data[icode].operand[1].mode;
5456
5457 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5458 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5459 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5460 op1 = copy_to_mode_reg (mode1, op1);
5461
5462 pat = GEN_FCN (icode) (op0, op1);
5463 if (pat)
5464 emit_insn (pat);
5465
5466 *expandedp = true;
5467 return NULL_RTX;
5468 }
5469
5470 /* Expand the dst builtins. */
5471 static rtx
5472 altivec_expand_dst_builtin (exp, target, expandedp)
5473 tree exp;
5474 rtx target ATTRIBUTE_UNUSED;
5475 bool *expandedp;
5476 {
5477 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5478 tree arglist = TREE_OPERAND (exp, 1);
5479 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5480 tree arg0, arg1, arg2;
5481 enum machine_mode mode0, mode1, mode2;
5482 rtx pat, op0, op1, op2;
5483 struct builtin_description *d;
5484 size_t i;
5485
5486 *expandedp = false;
5487
5488 /* Handle DST variants. */
5489 d = (struct builtin_description *) bdesc_dst;
5490 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5491 if (d->code == fcode)
5492 {
5493 arg0 = TREE_VALUE (arglist);
5494 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5495 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5496 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5497 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5498 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5499 mode0 = insn_data[d->icode].operand[0].mode;
5500 mode1 = insn_data[d->icode].operand[1].mode;
5501 mode2 = insn_data[d->icode].operand[2].mode;
5502
5503 /* Invalid arguments, bail out before generating bad rtl. */
5504 if (arg0 == error_mark_node
5505 || arg1 == error_mark_node
5506 || arg2 == error_mark_node)
5507 return const0_rtx;
5508
5509 if (TREE_CODE (arg2) != INTEGER_CST
5510 || TREE_INT_CST_LOW (arg2) & ~0x3)
5511 {
5512 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
5513 return const0_rtx;
5514 }
5515
5516 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
5517 op0 = copy_to_mode_reg (mode0, op0);
5518 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
5519 op1 = copy_to_mode_reg (mode1, op1);
5520
5521 pat = GEN_FCN (d->icode) (op0, op1, op2);
5522 if (pat != 0)
5523 emit_insn (pat);
5524
5525 *expandedp = true;
5526 return NULL_RTX;
5527 }
5528
5529 return NULL_RTX;
5530 }
5531
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   This is the top-level AltiVec expander: it first delegates to the
   specialized ld/st/dst expanders, then handles the irregular
   builtins (stores, VSCR access, data-stream stop), and finally
   scans the abs, predicate and lvx-family tables.  */
static rtx
altivec_expand_builtin (exp, target, expandedp)
     tree exp;
     rtx target;
     bool *expandedp;
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  rtx op0, pat;
  enum machine_mode tmode, mode0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  /* Each specialized expander sets *EXPANDEDP on a hit.  */
  target = altivec_expand_ld_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_st_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  target = altivec_expand_dst_builtin (exp, target, expandedp);
  if (*expandedp)
    return target;

  /* Assume success from here on; the fall-through at the bottom
     resets *EXPANDEDP if nothing matched.  */
  *expandedp = true;

  switch (fcode)
    {
    case ALTIVEC_BUILTIN_STVX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
    case ALTIVEC_BUILTIN_STVEBX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
    case ALTIVEC_BUILTIN_STVEHX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
    case ALTIVEC_BUILTIN_STVEWX:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
    case ALTIVEC_BUILTIN_STVXL:
      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);

    case ALTIVEC_BUILTIN_MFVSCR:
      /* Move from the vector status and control register.  */
      icode = CODE_FOR_altivec_mfvscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;

    case ALTIVEC_BUILTIN_MTVSCR:
      /* Move to the vector status and control register.  */
      icode = CODE_FOR_altivec_mtvscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSSALL:
      emit_insn (gen_altivec_dssall ());
      return NULL_RTX;

    case ALTIVEC_BUILTIN_DSS:
      icode = CODE_FOR_altivec_dss;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      /* The data-stream tag must be a compile-time 2-bit literal.  */
      if (TREE_CODE (arg0) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg0) & ~0x3)
	{
	  error ("argument to dss must be a 2-bit unsigned literal");
	  return const0_rtx;
	}

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      emit_insn (gen_altivec_dss (op0));
      return NULL_RTX;
    }

  /* Expand abs* operations.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    if (d->code == fcode)
      return altivec_expand_abs_builtin (d->icode, arglist, target);

  /* Expand the AltiVec predicates.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    if (dp->code == fcode)
      return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);

  /* LV* are funky.  We initialized them differently.  */
  switch (fcode)
    {
    case ALTIVEC_BUILTIN_LVSL:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVSR:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEBX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEHX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVEWX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVXL:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
					  arglist, target);
    case ALTIVEC_BUILTIN_LVX:
      return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
					  arglist, target);
    default:
      break;
    }

  /* Nothing matched; tell the caller to keep looking.  */
  *expandedp = false;
  return NULL_RTX;
}
5683
/* Binops that need to be initialized manually, but can be expanded
   automagically by rs6000_expand_binop_builtin.

   Each entry maps an SPE load builtin to its insn code; the mask
   field is left 0 here — these builtins are registered individually
   in spe_init_builtins.  */
static struct builtin_description bdesc_2arg_spe[] =
{
  { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
  { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
  { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
  { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
  { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
  { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
  { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
  { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
  { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
  { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
  { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
  { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
  { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
  { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
  { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
  { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
  { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
  { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
  { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
  { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
  { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
  { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
};
5711
/* Expand the builtin in EXP and store the result in TARGET.  Store
   true in *EXPANDEDP if we found a builtin to expand.

   This expands the SPE builtins that are not simple unary and binary
   operations.  */
static rtx
spe_expand_builtin (exp, target, expandedp)
     tree exp;
     rtx target;
     bool *expandedp;
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg1, arg0;
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
  enum insn_code icode;
  enum machine_mode tmode, mode0;
  rtx pat, op0;
  struct builtin_description *d;
  size_t i;

  *expandedp = true;

  /* Syntax check for a 5-bit unsigned immediate.  These stores take
     a small literal as their third argument.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDD:
    case SPE_BUILTIN_EVSTDH:
    case SPE_BUILTIN_EVSTDW:
    case SPE_BUILTIN_EVSTWHE:
    case SPE_BUILTIN_EVSTWHO:
    case SPE_BUILTIN_EVSTWWE:
    case SPE_BUILTIN_EVSTWWO:
      arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      if (TREE_CODE (arg1) != INTEGER_CST
	  || TREE_INT_CST_LOW (arg1) & ~0x1f)
	{
	  error ("argument 2 must be a 5-bit unsigned literal");
	  return const0_rtx;
	}
      break;
    default:
      break;
    }

  /* The manually-initialized loads expand like ordinary binops.  */
  d = (struct builtin_description *) bdesc_2arg_spe;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
    if (d->code == fcode)
      return rs6000_expand_binop_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_predicate_builtin (d->icode, arglist, target);

  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
    if (d->code == fcode)
      return spe_expand_evsel_builtin (d->icode, arglist, target);

  /* Irregular builtins: stores and SPEFSCR access.  */
  switch (fcode)
    {
    case SPE_BUILTIN_EVSTDDX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
    case SPE_BUILTIN_EVSTDHX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
    case SPE_BUILTIN_EVSTDWX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
    case SPE_BUILTIN_EVSTWHEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
    case SPE_BUILTIN_EVSTWHOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
    case SPE_BUILTIN_EVSTWWEX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
    case SPE_BUILTIN_EVSTWWOX:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
    case SPE_BUILTIN_EVSTDD:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
    case SPE_BUILTIN_EVSTDH:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
    case SPE_BUILTIN_EVSTDW:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
    case SPE_BUILTIN_EVSTWHE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
    case SPE_BUILTIN_EVSTWHO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
    case SPE_BUILTIN_EVSTWWE:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
    case SPE_BUILTIN_EVSTWWO:
      return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
    case SPE_BUILTIN_MFSPEFSCR:
      icode = CODE_FOR_spe_mfspefscr;
      tmode = insn_data[icode].operand[0].mode;

      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);

      pat = GEN_FCN (icode) (target);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;
    case SPE_BUILTIN_MTSPEFSCR:
      icode = CODE_FOR_spe_mtspefscr;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      mode0 = insn_data[icode].operand[0].mode;

      /* If we got invalid arguments bail out before generating bad rtl.  */
      if (arg0 == error_mark_node)
	return const0_rtx;

      if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);

      pat = GEN_FCN (icode) (op0);
      if (pat)
	emit_insn (pat);
      return NULL_RTX;
    default:
      break;
    }

  /* Nothing matched; tell the caller to keep looking.  */
  *expandedp = false;
  return NULL_RTX;
}
5839
/* Expand an SPE predicate builtin.  ICODE is the comparison pattern;
   ARGLIST is (form, a, b) where FORM is a literal 0-3 selecting the
   all/any/upper/lower variant.  The SImode result (0 or 1) is placed
   in TARGET.  */
static rtx
spe_expand_predicate_builtin (icode, arglist, target)
     enum insn_code icode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch, tmp;
  tree form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int form_int;
  enum rtx_code code;

  if (TREE_CODE (form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_spe_predicate must be a constant");
      return const0_rtx;
    }
  else
    form_int = TREE_INT_CST_LOW (form);

  /* Both comparison inputs must share one mode.  */
  if (mode0 != mode1)
    abort ();

  /* If we got invalid arguments bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  if (target == 0
      || GET_MODE (target) != SImode
      || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
    target = gen_reg_rtx (SImode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  scratch = gen_reg_rtx (CCmode);

  pat = GEN_FCN (icode) (scratch, op0, op1);
  if (! pat)
    return const0_rtx;
  emit_insn (pat);

  /* There are 4 variants for each predicate: _any_, _all_, _upper_,
     _lower_.  We use one compare, but look in different bits of the
     CR for each variant.

     There are 2 elements in each SPE simd type (upper/lower).  The CR
     bits are set as follows:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     U     |   L    | (U | L) | (U & L)

     So, for an "all" relationship, BIT 3 would be set.
     For an "any" relationship, BIT 2 would be set.  Etc.

     Following traditional nomenclature, these bits map to:

     BIT0  | BIT 1  | BIT 2   | BIT 3
     LT    | GT     | EQ      | OV

     Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
  */

  switch (form_int)
    {
      /* All variant.  OV bit.  */
    case 0:
      /* We need to get to the OV bit, which is the ORDERED bit.  We
	 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
	 that's ugly and will trigger a validate_condition_mode abort.
	 So let's just use another pattern.  */
      emit_insn (gen_move_from_CR_ov_bit (target, scratch));
      return target;
      /* Any variant.  EQ bit.  */
    case 1:
      code = EQ;
      break;
      /* Upper variant.  LT bit.  */
    case 2:
      code = LT;
      break;
      /* Lower variant.  GT bit.  */
    case 3:
      code = GT;
      break;
    default:
      error ("argument 1 of __builtin_spe_predicate is out of range");
      return const0_rtx;
    }

  /* Extract the chosen CR bit as an SImode 0/1 value.  */
  tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
  emit_move_insn (target, tmp);

  return target;
}
5941
5942 /* The evsel builtins look like this:
5943
5944 e = __builtin_spe_evsel_OP (a, b, c, d);
5945
5946 and work like this:
5947
5948 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5949 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5950 */
5951
5952 static rtx
5953 spe_expand_evsel_builtin (icode, arglist, target)
5954 enum insn_code icode;
5955 tree arglist;
5956 rtx target;
5957 {
5958 rtx pat, scratch;
5959 tree arg0 = TREE_VALUE (arglist);
5960 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5961 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5962 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5963 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5964 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5965 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5966 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5967 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5968 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5969
5970 if (mode0 != mode1)
5971 abort ();
5972
5973 if (arg0 == error_mark_node || arg1 == error_mark_node
5974 || arg2 == error_mark_node || arg3 == error_mark_node)
5975 return const0_rtx;
5976
5977 if (target == 0
5978 || GET_MODE (target) != mode0
5979 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5980 target = gen_reg_rtx (mode0);
5981
5982 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5983 op0 = copy_to_mode_reg (mode0, op0);
5984 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5985 op1 = copy_to_mode_reg (mode0, op1);
5986 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5987 op2 = copy_to_mode_reg (mode0, op2);
5988 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5989 op3 = copy_to_mode_reg (mode0, op3);
5990
5991 /* Generate the compare. */
5992 scratch = gen_reg_rtx (CCmode);
5993 pat = GEN_FCN (icode) (scratch, op0, op1);
5994 if (! pat)
5995 return const0_rtx;
5996 emit_insn (pat);
5997
5998 if (mode0 == V2SImode)
5999 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
6000 else
6001 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
6002
6003 return target;
6004 }
6005
6006 /* Expand an expression EXP that calls a built-in function,
6007 with result going to TARGET if that's convenient
6008 (and in mode MODE if that's convenient).
6009 SUBTARGET may be used as the target for computing one of EXP's operands.
6010 IGNORE is nonzero if the value is to be ignored. */
6011
6012 static rtx
6013 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
6014 tree exp;
6015 rtx target;
6016 rtx subtarget ATTRIBUTE_UNUSED;
6017 enum machine_mode mode ATTRIBUTE_UNUSED;
6018 int ignore ATTRIBUTE_UNUSED;
6019 {
6020 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6021 tree arglist = TREE_OPERAND (exp, 1);
6022 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6023 struct builtin_description *d;
6024 size_t i;
6025 rtx ret;
6026 bool success;
6027
6028 if (TARGET_ALTIVEC)
6029 {
6030 ret = altivec_expand_builtin (exp, target, &success);
6031
6032 if (success)
6033 return ret;
6034 }
6035 if (TARGET_SPE)
6036 {
6037 ret = spe_expand_builtin (exp, target, &success);
6038
6039 if (success)
6040 return ret;
6041 }
6042
6043 if (TARGET_ALTIVEC || TARGET_SPE)
6044 {
6045 /* Handle simple unary operations. */
6046 d = (struct builtin_description *) bdesc_1arg;
6047 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6048 if (d->code == fcode)
6049 return rs6000_expand_unop_builtin (d->icode, arglist, target);
6050
6051 /* Handle simple binary operations. */
6052 d = (struct builtin_description *) bdesc_2arg;
6053 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6054 if (d->code == fcode)
6055 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6056
6057 /* Handle simple ternary operations. */
6058 d = (struct builtin_description *) bdesc_3arg;
6059 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6060 if (d->code == fcode)
6061 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
6062 }
6063
6064 abort ();
6065 return NULL_RTX;
6066 }
6067
/* Target hook: create the builtin functions for whichever vector
   extensions (SPE, AltiVec) are enabled.  */
static void
rs6000_init_builtins ()
{
  /* Distinct copies of the V2SI/V2SF type nodes (copy_node yields
     separate nodes with identical contents), plus a pointer type to
     the opaque V2SI.  These are used by the SPE function types built
     in spe_init_builtins.  */
  opaque_V2SI_type_node = copy_node (V2SI_type_node);
  opaque_V2SF_type_node = copy_node (V2SF_type_node);
  opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);

  if (TARGET_SPE)
    spe_init_builtins ();
  if (TARGET_ALTIVEC)
    altivec_init_builtins ();
  if (TARGET_ALTIVEC || TARGET_SPE)
    rs6000_common_init_builtins ();
}
6082
6083 /* Search through a set of builtins and enable the mask bits.
6084 DESC is an array of builtins.
6085 SIZE is the total number of builtins.
6086 START is the builtin enum at which to start.
6087 END is the builtin enum at which to end. */
6088 static void
6089 enable_mask_for_builtins (desc, size, start, end)
6090 struct builtin_description *desc;
6091 int size;
6092 enum rs6000_builtins start, end;
6093 {
6094 int i;
6095
6096 for (i = 0; i < size; ++i)
6097 if (desc[i].code == start)
6098 break;
6099
6100 if (i == size)
6101 return;
6102
6103 for (; i < size; ++i)
6104 {
6105 /* Flip all the bits on. */
6106 desc[i].mask = target_flags;
6107 if (desc[i].code == end)
6108 break;
6109 }
6110 }
6111
/* Build the function types needed by the SPE builtins, enable the
   mask bits for the table-driven SPE entries, and register the
   irregular SPE builtins (stores, loads, SPEFSCR access, predicates
   and evsel selectors).  */
static void
spe_init_builtins ()
{
  tree endlink = void_list_node;
  tree puint_type_node = build_pointer_type (unsigned_type_node);
  tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
  struct builtin_description *d;
  size_t i;

  /* Function types, named <return>_ftype_<args>.  The opaque V2SI/
     V2SF types were set up in rs6000_init_builtins.  */
  tree v2si_ftype_4_v2si
    = build_function_type
    (opaque_V2SI_type_node,
     tree_cons (NULL_TREE, opaque_V2SI_type_node,
		tree_cons (NULL_TREE, opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, opaque_V2SI_type_node,
						 endlink)))));

  tree v2sf_ftype_4_v2sf
    = build_function_type
    (opaque_V2SF_type_node,
     tree_cons (NULL_TREE, opaque_V2SF_type_node,
		tree_cons (NULL_TREE, opaque_V2SF_type_node,
			   tree_cons (NULL_TREE, opaque_V2SF_type_node,
				      tree_cons (NULL_TREE, opaque_V2SF_type_node,
						 endlink)))));

  tree int_ftype_int_v2si_v2si
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      endlink))));

  tree int_ftype_int_v2sf_v2sf
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, opaque_V2SF_type_node,
			   tree_cons (NULL_TREE, opaque_V2SF_type_node,
				      endlink))));

  tree void_ftype_v2si_puint_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));

  tree void_ftype_v2si_puint_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,
							    char_type_node,
							    endlink))));

  tree void_ftype_v2si_pv2si_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));

  tree void_ftype_v2si_pv2si_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, opaque_V2SI_type_node,
				      tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
						 tree_cons (NULL_TREE,
							    char_type_node,
							    endlink))));

  tree void_ftype_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, integer_type_node, endlink));

  tree int_ftype_void
    = build_function_type (integer_type_node, endlink);

  tree v2si_ftype_pv2si_int
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_puint_int
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, puint_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_pushort_int
    = build_function_type (opaque_V2SI_type_node,
			   tree_cons (NULL_TREE, pushort_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  /* The initialization of the simple binary and unary builtins is
     done in rs6000_common_init_builtins, but we have to enable the
     mask bits here manually because we have run out of `target_flags'
     bits.  We really need to redesign this mask business.  */

  enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
			    ARRAY_SIZE (bdesc_2arg),
			    SPE_BUILTIN_EVADDW,
			    SPE_BUILTIN_EVXOR);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
			    ARRAY_SIZE (bdesc_1arg),
			    SPE_BUILTIN_EVABS,
			    SPE_BUILTIN_EVSUBFUSIAAW);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
			    ARRAY_SIZE (bdesc_spe_predicates),
			    SPE_BUILTIN_EVCMPEQ,
			    SPE_BUILTIN_EVFSTSTLT);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
			    ARRAY_SIZE (bdesc_spe_evsel),
			    SPE_BUILTIN_EVSEL_CMPGTS,
			    SPE_BUILTIN_EVSEL_FSTSTEQ);

  /* Initialize irregular SPE builtins.  */

  def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
  def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
  def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
  def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
  def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
  def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
  def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
  def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
  def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
  def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
  def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
  def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
  def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
  def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);

  /* Loads.  */
  def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
  def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
  def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
  def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
  def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
  def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
  def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
  def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
  def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
  def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
  def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
  def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
  def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
  def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);

  /* Predicates.  The function type is chosen by the mode of the
     comparison pattern's second operand.  */
  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
	{
	case V2SImode:
	  type = int_ftype_int_v2si_v2si;
	  break;
	case V2SFmode:
	  type = int_ftype_int_v2sf_v2sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Evsel predicates.  */
  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
	{
	case V2SImode:
	  type = v2si_ftype_4_v2si;
	  break;
	case V2SFmode:
	  type = v2sf_ftype_4_v2sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }
}
6319
/* Register the AltiVec-specific builtin functions with the front end:
   raw load/store internals, VSCR access, data-stream (dst/dss)
   builtins, the lvs*/lve*/stv* element accessors, and the table-driven
   predicate and abs* builtins.  */

static void
altivec_init_builtins ()
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  /* Plain pointer types used by the store internals (they write
     through the pointer).  */
  tree pfloat_type_node = build_pointer_type (float_type_node);
  tree pint_type_node = build_pointer_type (integer_type_node);
  tree pshort_type_node = build_pointer_type (short_integer_type_node);
  tree pchar_type_node = build_pointer_type (char_type_node);

  tree pvoid_type_node = build_pointer_type (void_type_node);

  /* Const-qualified pointer types for the load internals, which only
     read through their pointer argument.  */
  tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
  tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
  tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
  tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));

  tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));

  /* Function types, named ret_ftype_arg1_arg2...  */
  tree int_ftype_int_v4si_v4si
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_pcfloat
    = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
  tree void_ftype_pfloat_v4sf
    = build_function_type_list (void_type_node,
				pfloat_type_node, V4SF_type_node, NULL_TREE);
  tree v4si_ftype_pcint
    = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
  tree void_ftype_pint_v4si
    = build_function_type_list (void_type_node,
				pint_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_pcshort
    = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
  tree void_ftype_pshort_v8hi
    = build_function_type_list (void_type_node,
				pshort_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_pcchar
    = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
  tree void_ftype_pchar_v16qi
    = build_function_type_list (void_type_node,
				pchar_type_node, V16QI_type_node, NULL_TREE);
  tree void_ftype_v4si
    = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_void
    = build_function_type (V8HI_type_node, void_list_node);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree void_ftype_qi
    = build_function_type_list (void_type_node, char_type_node, NULL_TREE);

  tree v16qi_ftype_int_pcvoid
    = build_function_type_list (V16QI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v8hi_ftype_int_pcvoid
    = build_function_type_list (V8HI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v4si_ftype_int_pcvoid
    = build_function_type_list (V4SI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);

  tree void_ftype_v4si_int_pvoid
    = build_function_type_list (void_type_node,
				V4SI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v16qi_int_pvoid
    = build_function_type_list (void_type_node,
				V16QI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v8hi_int_pvoid
    = build_function_type_list (void_type_node,
				V8HI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree int_ftype_int_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				integer_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree int_ftype_int_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				integer_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree int_ftype_int_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si
    = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi
    = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi
    = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree void_ftype_pcvoid_int_char
    = build_function_type_list (void_type_node,
				pcvoid_type_node, integer_type_node,
				char_type_node, NULL_TREE);

  /* Raw vector load/store internals, one pair per element type.  */
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
	       ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
	       ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
  /* VSCR access and data-stream control.  */
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
  /* Permute-control and element load/store builtins.  */
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);

  /* Add the DST variants.  All share the same prototype.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);

  /* Initialize the predicates.  The prototype is selected from the
     mode of insn operand 1 (the first vector operand).  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    {
      enum machine_mode mode1;
      tree type;

      mode1 = insn_data[dp->icode].operand[1].mode;

      switch (mode1)
	{
	case V4SImode:
	  type = int_ftype_int_v4si_v4si;
	  break;
	case V8HImode:
	  type = int_ftype_int_v8hi_v8hi;
	  break;
	case V16QImode:
	  type = int_ftype_int_v16qi_v16qi;
	  break;
	case V4SFmode:
	  type = int_ftype_int_v4sf_v4sf;
	  break;
	default:
	  /* Table entry with an unexpected operand mode.  */
	  abort ();
	}

      def_builtin (dp->mask, dp->name, type, dp->code);
    }

  /* Initialize the abs* operators.  The prototype is selected from the
     mode of insn operand 0 (the result).  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    {
      enum machine_mode mode0;
      tree type;

      mode0 = insn_data[d->icode].operand[0].mode;

      switch (mode0)
	{
	case V4SImode:
	  type = v4si_ftype_v4si;
	  break;
	case V8HImode:
	  type = v8hi_ftype_v8hi;
	  break;
	case V16QImode:
	  type = v16qi_ftype_v16qi;
	  break;
	case V4SFmode:
	  type = v4sf_ftype_v4sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }
}
6518
/* Register the builtins shared between AltiVec and SPE: the simple
   ternary, binary and unary operators from the bdesc_3arg, bdesc_2arg
   and bdesc_1arg tables.  For each table entry the function prototype
   is deduced from the insn's operand modes; an unrecognized mode
   combination aborts, so the tables and this function must be kept in
   sync.  */

static void
rs6000_common_init_builtins ()
{
  struct builtin_description *d;
  size_t i;

  /* Function types, named ret_ftype_arg1_arg2...; "char" here denotes a
     small literal operand, and the v2si/v2sf (SPE) variants use the
     opaque vector types.  */
  tree v4sf_ftype_v4sf_v4sf_v16qi
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_v16qi
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_v16qi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi_v16qi
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree v4si_ftype_char
    = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_char
    = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_char
    = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_v16qi
    = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);

  tree v2si_ftype_v2si_v2si
    = build_function_type_list (opaque_V2SI_type_node,
				opaque_V2SI_type_node,
				opaque_V2SI_type_node, NULL_TREE);

  tree v2sf_ftype_v2sf_v2sf
    = build_function_type_list (opaque_V2SF_type_node,
				opaque_V2SF_type_node,
				opaque_V2SF_type_node, NULL_TREE);

  tree v2si_ftype_int_int
    = build_function_type_list (opaque_V2SI_type_node,
				integer_type_node, integer_type_node,
				NULL_TREE);

  tree v2si_ftype_v2si
    = build_function_type_list (opaque_V2SI_type_node,
				opaque_V2SI_type_node, NULL_TREE);

  tree v2sf_ftype_v2sf
    = build_function_type_list (opaque_V2SF_type_node,
				opaque_V2SF_type_node, NULL_TREE);

  tree v2sf_ftype_v2si
    = build_function_type_list (opaque_V2SF_type_node,
				opaque_V2SI_type_node, NULL_TREE);

  tree v2si_ftype_v2sf
    = build_function_type_list (opaque_V2SI_type_node,
				opaque_V2SF_type_node, NULL_TREE);

  tree v2si_ftype_v2si_char
    = build_function_type_list (opaque_V2SI_type_node,
				opaque_V2SI_type_node,
				char_type_node, NULL_TREE);

  tree v2si_ftype_int_char
    = build_function_type_list (opaque_V2SI_type_node,
				integer_type_node, char_type_node, NULL_TREE);

  tree v2si_ftype_char
    = build_function_type_list (opaque_V2SI_type_node,
				char_type_node, NULL_TREE);

  tree int_ftype_int_int
    = build_function_type_list (integer_type_node,
				integer_type_node, integer_type_node,
				NULL_TREE);

  tree v4si_ftype_v4si_v4si
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4si_char
    = build_function_type_list (V4SF_type_node,
				V4SI_type_node, char_type_node, NULL_TREE);
  tree v4si_ftype_v4sf_char
    = build_function_type_list (V4SI_type_node,
				V4SF_type_node, char_type_node, NULL_TREE);
  tree v4si_ftype_v4si_char
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, char_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_char
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_char
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, char_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi_char
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node,
				char_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_char
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				char_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_char
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				char_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_char
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				char_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_v4si
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf_v4sf_v4sf
    = build_function_type_list (V4SF_type_node,
				V4SF_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si_v4si_v4si
    = build_function_type_list (V4SI_type_node,
				V4SI_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi_v8hi_v8hi
    = build_function_type_list (V8HI_type_node,
				V8HI_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v8hi_v4si
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V8HI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v16qi_v4si
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V16QI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi_v16qi
    = build_function_type_list (V16QI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v4sf_v4sf
    = build_function_type_list (V4SI_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree v8hi_ftype_v16qi_v16qi
    = build_function_type_list (V8HI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v8hi
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v8hi_ftype_v4si_v4si
    = build_function_type_list (V8HI_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v16qi_ftype_v8hi_v8hi
    = build_function_type_list (V16QI_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v4si
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v16qi_v16qi
    = build_function_type_list (V4SI_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi_v4si
    = build_function_type_list (V4SI_type_node,
				V8HI_type_node, V4SI_type_node, NULL_TREE);
  tree v4si_ftype_v8hi
    = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
  tree int_ftype_v4si_v4si
    = build_function_type_list (integer_type_node,
				V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree int_ftype_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree int_ftype_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree int_ftype_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				V8HI_type_node, V8HI_type_node, NULL_TREE);

  /* Add the simple ternary operators.  */
  d = (struct builtin_description *) bdesc_3arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
    {

      enum machine_mode mode0, mode1, mode2, mode3;
      tree type;

      /* Skip placeholder entries.  */
      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;

      /* Operand 0 is the result; 1-3 are the arguments.  */
      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;
      mode2 = insn_data[d->icode].operand[2].mode;
      mode3 = insn_data[d->icode].operand[3].mode;

      /* When all four are of the same mode.  */
      if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
	{
	  switch (mode0)
	    {
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si_v4si;
	      break;
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf_v4sf;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi_v8hi;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi_v16qi;
	      break;
	    default:
	      abort();
	    }
	}
      /* Result and first two arguments agree, third is a vchar
	 (e.g. permute-style operations).  */
      else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
	{
	  switch (mode0)
	    {
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si_v16qi;
	      break;
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf_v16qi;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi_v16qi;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi_v16qi;
	      break;
	    default:
	      abort();
	    }
	}
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
	       && mode3 == V4SImode)
	type = v4si_ftype_v16qi_v16qi_v4si;
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
	       && mode3 == V4SImode)
	type = v4si_ftype_v8hi_v8hi_v4si;
      else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
	       && mode3 == V4SImode)
	type = v4sf_ftype_v4sf_v4sf_v4si;

      /* vchar, vchar, vchar, 4 bit literal.  */
      else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v16qi_ftype_v16qi_v16qi_char;

      /* vshort, vshort, vshort, 4 bit literal.  */
      else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v8hi_ftype_v8hi_v8hi_char;

      /* vint, vint, vint, 4 bit literal.  */
      else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v4si_ftype_v4si_v4si_char;

      /* vfloat, vfloat, vfloat, 4 bit literal.  */
      else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
	       && mode3 == QImode)
	type = v4sf_ftype_v4sf_v4sf_char;

      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Add the simple binary operators.  */
  d = (struct builtin_description *) bdesc_2arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    {
      enum machine_mode mode0, mode1, mode2;
      tree type;

      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;

      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;
      mode2 = insn_data[d->icode].operand[2].mode;

      /* When all three operands are of the same mode.  */
      if (mode0 == mode1 && mode1 == mode2)
	{
	  switch (mode0)
	    {
	    case V4SFmode:
	      type = v4sf_ftype_v4sf_v4sf;
	      break;
	    case V4SImode:
	      type = v4si_ftype_v4si_v4si;
	      break;
	    case V16QImode:
	      type = v16qi_ftype_v16qi_v16qi;
	      break;
	    case V8HImode:
	      type = v8hi_ftype_v8hi_v8hi;
	      break;
	    case V2SImode:
	      type = v2si_ftype_v2si_v2si;
	      break;
	    case V2SFmode:
	      type = v2sf_ftype_v2sf_v2sf;
	      break;
	    case SImode:
	      type = int_ftype_int_int;
	      break;
	    default:
	      abort ();
	    }
	}

      /* A few other combos we really don't want to do manually.  */

      /* vint, vfloat, vfloat.  */
      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
	type = v4si_ftype_v4sf_v4sf;

      /* vshort, vchar, vchar.  */
      else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
	type = v8hi_ftype_v16qi_v16qi;

      /* vint, vshort, vshort.  */
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
	type = v4si_ftype_v8hi_v8hi;

      /* vshort, vint, vint.  */
      else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
	type = v8hi_ftype_v4si_v4si;

      /* vchar, vshort, vshort.  */
      else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
	type = v16qi_ftype_v8hi_v8hi;

      /* vint, vchar, vint.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
	type = v4si_ftype_v16qi_v4si;

      /* vint, vchar, vchar.  */
      else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
	type = v4si_ftype_v16qi_v16qi;

      /* vint, vshort, vint.  */
      else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
	type = v4si_ftype_v8hi_v4si;

      /* vint, vint, 5 bit literal.  */
      else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
	type = v4si_ftype_v4si_char;

      /* vshort, vshort, 5 bit literal.  */
      else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
	type = v8hi_ftype_v8hi_char;

      /* vchar, vchar, 5 bit literal.  */
      else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
	type = v16qi_ftype_v16qi_char;

      /* vfloat, vint, 5 bit literal.  */
      else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
	type = v4sf_ftype_v4si_char;

      /* vint, vfloat, 5 bit literal.  */
      else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
	type = v4si_ftype_v4sf_char;

      /* SPE opaque-vector combinations.  */
      else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
	type = v2si_ftype_int_int;

      else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
	type = v2si_ftype_v2si_char;

      else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
	type = v2si_ftype_int_char;

      /* int, x, x.  */
      else if (mode0 == SImode)
	{
	  switch (mode1)
	    {
	    case V4SImode:
	      type = int_ftype_v4si_v4si;
	      break;
	    case V4SFmode:
	      type = int_ftype_v4sf_v4sf;
	      break;
	    case V16QImode:
	      type = int_ftype_v16qi_v16qi;
	      break;
	    case V8HImode:
	      type = int_ftype_v8hi_v8hi;
	      break;
	    default:
	      abort ();
	    }
	}

      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Add the simple unary operators.  */
  d = (struct builtin_description *) bdesc_1arg;
  for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
    {
      enum machine_mode mode0, mode1;
      tree type;

      if (d->name == 0 || d->icode == CODE_FOR_nothing)
	continue;

      mode0 = insn_data[d->icode].operand[0].mode;
      mode1 = insn_data[d->icode].operand[1].mode;

      if (mode0 == V4SImode && mode1 == QImode)
	type = v4si_ftype_char;
      else if (mode0 == V8HImode && mode1 == QImode)
	type = v8hi_ftype_char;
      else if (mode0 == V16QImode && mode1 == QImode)
	type = v16qi_ftype_char;
      else if (mode0 == V4SFmode && mode1 == V4SFmode)
	type = v4sf_ftype_v4sf;
      else if (mode0 == V8HImode && mode1 == V16QImode)
	type = v8hi_ftype_v16qi;
      else if (mode0 == V4SImode && mode1 == V8HImode)
	type = v4si_ftype_v8hi;
      else if (mode0 == V2SImode && mode1 == V2SImode)
	type = v2si_ftype_v2si;
      else if (mode0 == V2SFmode && mode1 == V2SFmode)
	type = v2sf_ftype_v2sf;
      else if (mode0 == V2SFmode && mode1 == V2SImode)
	type = v2sf_ftype_v2si;
      else if (mode0 == V2SImode && mode1 == V2SFmode)
	type = v2si_ftype_v2sf;
      else if (mode0 == V2SImode && mode1 == QImode)
	type = v2si_ftype_char;
      else
	abort ();

      def_builtin (d->mask, d->name, type, d->code);
    }
}
6977
6978 \f
/* Expand a block move operation, and return 1 if successful.  Return 0
   if we should let the compiler generate normal code.

   operands[0] is the destination
   operands[1] is the source
   operands[2] is the length
   operands[3] is the alignment */

/* Number of register moves buffered before their stores are emitted;
   batching the loads ahead of the stores allows them to overlap.  */
#define MAX_MOVE_REG 4

int
expand_block_move (operands)
     rtx operands[];
{
  rtx orig_dest = operands[0];
  rtx orig_src	= operands[1];
  rtx bytes_rtx	= operands[2];
  rtx align_rtx = operands[3];
  int constp	= (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;
  int offset;
  int move_bytes;
  rtx stores[MAX_MOVE_REG];
  int num_reg = 0;

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* If this is not a fixed size alignment, abort */
  if (GET_CODE (align_rtx) != CONST_INT)
    abort ();
  align = INTVAL (align_rtx);

  /* Anything to move? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  /* Pick the widest mover available for each chunk; mode stays BLKmode
     when a string (lswi/stswi) multi-register move is selected.  */
  for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
    {
      union {
	rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
	rtx (*mov) PARAMS ((rtx, rtx));
      } gen_func;
      enum machine_mode mode = BLKmode;
      rtx src, dest;

      /* String moves need registers r5..r12 (or a subset) free; each
	 branch checks that the registers it would use are not fixed.  */
      if (TARGET_STRING
	  && bytes > 24		/* move up to 32 bytes at a time */
	  && ! fixed_regs[5]
	  && ! fixed_regs[6]
	  && ! fixed_regs[7]
	  && ! fixed_regs[8]
	  && ! fixed_regs[9]
	  && ! fixed_regs[10]
	  && ! fixed_regs[11]
	  && ! fixed_regs[12])
	{
	  move_bytes = (bytes > 32) ? 32 : bytes;
	  gen_func.movstrsi = gen_movstrsi_8reg;
	}
      else if (TARGET_STRING
	       && bytes > 16	/* move up to 24 bytes at a time */
	       && ! fixed_regs[5]
	       && ! fixed_regs[6]
	       && ! fixed_regs[7]
	       && ! fixed_regs[8]
	       && ! fixed_regs[9]
	       && ! fixed_regs[10])
	{
	  move_bytes = (bytes > 24) ? 24 : bytes;
	  gen_func.movstrsi = gen_movstrsi_6reg;
	}
      else if (TARGET_STRING
	       && bytes > 8	/* move up to 16 bytes at a time */
	       && ! fixed_regs[5]
	       && ! fixed_regs[6]
	       && ! fixed_regs[7]
	       && ! fixed_regs[8])
	{
	  move_bytes = (bytes > 16) ? 16 : bytes;
	  gen_func.movstrsi = gen_movstrsi_4reg;
	}
      else if (bytes >= 8 && TARGET_POWERPC64
	       /* 64-bit loads and stores require word-aligned
		  displacements.  */
	       && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	{
	  move_bytes = 8;
	  mode = DImode;
	  gen_func.mov = gen_movdi;
	}
      else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
	{			/* move up to 8 bytes at a time */
	  move_bytes = (bytes > 8) ? 8 : bytes;
	  gen_func.movstrsi = gen_movstrsi_2reg;
	}
      else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	{			/* move 4 bytes */
	  move_bytes = 4;
	  mode = SImode;
	  gen_func.mov = gen_movsi;
	}
      else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	{			/* move 2 bytes */
	  move_bytes = 2;
	  mode = HImode;
	  gen_func.mov = gen_movhi;
	}
      else if (TARGET_STRING && bytes > 1)
	{			/* move up to 4 bytes at a time */
	  move_bytes = (bytes > 4) ? 4 : bytes;
	  gen_func.movstrsi = gen_movstrsi_1reg;
	}
      else /* move 1 byte at a time */
	{
	  move_bytes = 1;
	  mode = QImode;
	  gen_func.mov = gen_movqi;
	}

      src = adjust_address (orig_src, mode, offset);
      dest = adjust_address (orig_dest, mode, offset);

      if (mode != BLKmode)
	{
	  /* Load into a fresh pseudo now; buffer the store so loads of
	     later chunks can be scheduled before it.  */
	  rtx tmp_reg = gen_reg_rtx (mode);

	  emit_insn ((*gen_func.mov) (tmp_reg, src));
	  stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
	}

      /* Flush the buffered stores when the buffer is full, before a
	 string move, or on the final chunk.  */
      if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
	{
	  int i;
	  for (i = 0; i < num_reg; i++)
	    emit_insn (stores[i]);
	  num_reg = 0;
	}

      if (mode == BLKmode)
	{
	  /* Move the address into scratch registers.  The movstrsi
	     patterns require zero offset.  */
	  if (!REG_P (XEXP (src, 0)))
	    {
	      rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
	      src = replace_equiv_address (src, src_reg);
	    }
	  set_mem_size (src, GEN_INT (move_bytes));

	  if (!REG_P (XEXP (dest, 0)))
	    {
	      rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
	      dest = replace_equiv_address (dest, dest_reg);
	    }
	  set_mem_size (dest, GEN_INT (move_bytes));

	  emit_insn ((*gen_func.movstrsi) (dest, src,
					   GEN_INT (move_bytes & 31),
					   align_rtx));
	}
    }

  return 1;
}
7152
7153 \f
7154 /* Return 1 if OP is a load multiple operation. It is known to be a
7155 PARALLEL and the first section will be tested. */
7156
7157 int
7158 load_multiple_operation (op, mode)
7159 rtx op;
7160 enum machine_mode mode ATTRIBUTE_UNUSED;
7161 {
7162 int count = XVECLEN (op, 0);
7163 unsigned int dest_regno;
7164 rtx src_addr;
7165 int i;
7166
7167 /* Perform a quick check so we don't blow up below. */
7168 if (count <= 1
7169 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7170 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7171 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7172 return 0;
7173
7174 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7175 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7176
7177 for (i = 1; i < count; i++)
7178 {
7179 rtx elt = XVECEXP (op, 0, i);
7180
7181 if (GET_CODE (elt) != SET
7182 || GET_CODE (SET_DEST (elt)) != REG
7183 || GET_MODE (SET_DEST (elt)) != SImode
7184 || REGNO (SET_DEST (elt)) != dest_regno + i
7185 || GET_CODE (SET_SRC (elt)) != MEM
7186 || GET_MODE (SET_SRC (elt)) != SImode
7187 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7188 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7189 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7190 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7191 return 0;
7192 }
7193
7194 return 1;
7195 }
7196
7197 /* Similar, but tests for store multiple. Here, the second vector element
7198 is a CLOBBER. It will be tested later. */
7199
7200 int
7201 store_multiple_operation (op, mode)
7202 rtx op;
7203 enum machine_mode mode ATTRIBUTE_UNUSED;
7204 {
7205 int count = XVECLEN (op, 0) - 1;
7206 unsigned int src_regno;
7207 rtx dest_addr;
7208 int i;
7209
7210 /* Perform a quick check so we don't blow up below. */
7211 if (count <= 1
7212 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7213 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7214 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7215 return 0;
7216
7217 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7218 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7219
7220 for (i = 1; i < count; i++)
7221 {
7222 rtx elt = XVECEXP (op, 0, i + 1);
7223
7224 if (GET_CODE (elt) != SET
7225 || GET_CODE (SET_SRC (elt)) != REG
7226 || GET_MODE (SET_SRC (elt)) != SImode
7227 || REGNO (SET_SRC (elt)) != src_regno + i
7228 || GET_CODE (SET_DEST (elt)) != MEM
7229 || GET_MODE (SET_DEST (elt)) != SImode
7230 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7231 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7232 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7233 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7234 return 0;
7235 }
7236
7237 return 1;
7238 }
7239
/* Return a string to perform a load_multiple operation.
   operands[0] is the vector.
   operands[1] is the source address.
   operands[2] is the first destination register.  */

const char *
rs6000_output_load_multiple (operands)
     rtx operands[3];
{
  /* We have to handle the case where the pseudo used to contain the address
     is assigned to one of the output registers.  */
  int i, j;
  int words = XVECLEN (operands[0], 0);
  rtx xop[10];

  /* A single word needs no lswi at all.  */
  if (XVECLEN (operands[0], 0) == 1)
    return "{l|lwz} %2,0(%1)";

  /* Scan for an output register that overlaps the address register.  */
  for (i = 0; i < words; i++)
    if (refers_to_regno_p (REGNO (operands[2]) + i,
			   REGNO (operands[2]) + i + 1, operands[1], 0))
      {
	if (i == words-1)
	  {
	    /* Address register is the LAST destination: lswi the first
	       words-1 registers, then load the final word (which
	       overwrites the address) last.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = operands[2];
	    output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
	else if (i == 0)
	  {
	    /* Address register is the FIRST destination: bump the
	       address past word 0, lswi the remaining words into the
	       following registers, then load word 0 (at offset -4 of
	       the bumped address) into the address register last.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
	    output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
	    return "";
	  }
	else
	  {
	    /* Address register is in the MIDDLE: emit individual loads
	       for every other word, then load the overlapping word
	       last so the address stays valid until the end.  */
	    for (j = 0; j < words; j++)
	      if (j != i)
		{
		  xop[0] = GEN_INT (j * 4);
		  xop[1] = operands[1];
		  xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
		  output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
		}
	    xop[0] = GEN_INT (i * 4);
	    xop[1] = operands[1];
	    output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
      }

  /* No overlap: a plain load-string-immediate does the whole job.  */
  return "{lsi|lswi} %2,%1,%N0";
}
7297
7298 /* Return 1 for a parallel vrsave operation. */
7299
7300 int
7301 vrsave_operation (op, mode)
7302 rtx op;
7303 enum machine_mode mode ATTRIBUTE_UNUSED;
7304 {
7305 int count = XVECLEN (op, 0);
7306 unsigned int dest_regno, src_regno;
7307 int i;
7308
7309 if (count <= 1
7310 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7311 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7312 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
7313 return 0;
7314
7315 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7316 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7317
7318 if (dest_regno != VRSAVE_REGNO
7319 && src_regno != VRSAVE_REGNO)
7320 return 0;
7321
7322 for (i = 1; i < count; i++)
7323 {
7324 rtx elt = XVECEXP (op, 0, i);
7325
7326 if (GET_CODE (elt) != CLOBBER
7327 && GET_CODE (elt) != SET)
7328 return 0;
7329 }
7330
7331 return 1;
7332 }
7333
7334 /* Return 1 for an PARALLEL suitable for mfcr. */
7335
7336 int
7337 mfcr_operation (op, mode)
7338 rtx op;
7339 enum machine_mode mode ATTRIBUTE_UNUSED;
7340 {
7341 int count = XVECLEN (op, 0);
7342 int i;
7343
7344 /* Perform a quick check so we don't blow up below. */
7345 if (count < 1
7346 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7347 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7348 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7349 return 0;
7350
7351 for (i = 0; i < count; i++)
7352 {
7353 rtx exp = XVECEXP (op, 0, i);
7354 rtx unspec;
7355 int maskval;
7356 rtx src_reg;
7357
7358 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
7359
7360 if (GET_CODE (src_reg) != REG
7361 || GET_MODE (src_reg) != CCmode
7362 || ! CR_REGNO_P (REGNO (src_reg)))
7363 return 0;
7364
7365 if (GET_CODE (exp) != SET
7366 || GET_CODE (SET_DEST (exp)) != REG
7367 || GET_MODE (SET_DEST (exp)) != SImode
7368 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
7369 return 0;
7370 unspec = SET_SRC (exp);
7371 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
7372
7373 if (GET_CODE (unspec) != UNSPEC
7374 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
7375 || XVECLEN (unspec, 0) != 2
7376 || XVECEXP (unspec, 0, 0) != src_reg
7377 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7378 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7379 return 0;
7380 }
7381 return 1;
7382 }
7383
7384 /* Return 1 for an PARALLEL suitable for mtcrf. */
7385
7386 int
7387 mtcrf_operation (op, mode)
7388 rtx op;
7389 enum machine_mode mode ATTRIBUTE_UNUSED;
7390 {
7391 int count = XVECLEN (op, 0);
7392 int i;
7393 rtx src_reg;
7394
7395 /* Perform a quick check so we don't blow up below. */
7396 if (count < 1
7397 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7398 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7399 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7400 return 0;
7401 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
7402
7403 if (GET_CODE (src_reg) != REG
7404 || GET_MODE (src_reg) != SImode
7405 || ! INT_REGNO_P (REGNO (src_reg)))
7406 return 0;
7407
7408 for (i = 0; i < count; i++)
7409 {
7410 rtx exp = XVECEXP (op, 0, i);
7411 rtx unspec;
7412 int maskval;
7413
7414 if (GET_CODE (exp) != SET
7415 || GET_CODE (SET_DEST (exp)) != REG
7416 || GET_MODE (SET_DEST (exp)) != CCmode
7417 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
7418 return 0;
7419 unspec = SET_SRC (exp);
7420 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
7421
7422 if (GET_CODE (unspec) != UNSPEC
7423 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
7424 || XVECLEN (unspec, 0) != 2
7425 || XVECEXP (unspec, 0, 0) != src_reg
7426 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7427 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7428 return 0;
7429 }
7430 return 1;
7431 }
7432
7433 /* Return 1 for an PARALLEL suitable for lmw. */
7434
int
lmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* lmw loads every register from DEST_REGNO through 31, so the
     element count must match exactly.  */
  if (dest_regno > 31
      || count != 32 - (int) dest_regno)
    return 0;

  /* Decompose the first address into base register and offset.  A base
     of r0 is rejected (r0 in a base position reads as literal zero).  */
  if (legitimate_indirect_address_p (src_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (src_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (legitimate_offset_address_p (SImode, src_addr, 0))
    {
      offset = INTVAL (XEXP (src_addr, 1));
      base_regno = REGNO (XEXP (src_addr, 0));
    }
  else
    return 0;

  /* Element I must load SImode register DEST_REGNO+I from the same
     base register at OFFSET + 4*I.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_SRC (elt), 0);
      if (legitimate_indirect_address_p (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (legitimate_offset_address_p (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
7510
7511 /* Return 1 for an PARALLEL suitable for stmw. */
7512
int
stmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  /* stmw stores every register from SRC_REGNO through 31, so the
     element count must match exactly.  */
  if (src_regno > 31
      || count != 32 - (int) src_regno)
    return 0;

  /* Decompose the first address into base register and offset.  A base
     of r0 is rejected (r0 in a base position reads as literal zero).  */
  if (legitimate_indirect_address_p (dest_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (dest_addr);
      if (base_regno == 0)
	return 0;
    }
  else if (legitimate_offset_address_p (SImode, dest_addr, 0))
    {
      offset = INTVAL (XEXP (dest_addr, 1));
      base_regno = REGNO (XEXP (dest_addr, 0));
    }
  else
    return 0;

  /* Element I must store SImode register SRC_REGNO+I to the same base
     register at OFFSET + 4*I.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_DEST (elt), 0);
      if (legitimate_indirect_address_p (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (legitimate_offset_address_p (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
7588 \f
7589 /* A validation routine: say whether CODE, a condition code, and MODE
7590 match. The other alternatives either don't make sense or should
7591 never be generated. */
7592
7593 static void
7594 validate_condition_mode (code, mode)
7595 enum rtx_code code;
7596 enum machine_mode mode;
7597 {
7598 if (GET_RTX_CLASS (code) != '<'
7599 || GET_MODE_CLASS (mode) != MODE_CC)
7600 abort ();
7601
7602 /* These don't make sense. */
7603 if ((code == GT || code == LT || code == GE || code == LE)
7604 && mode == CCUNSmode)
7605 abort ();
7606
7607 if ((code == GTU || code == LTU || code == GEU || code == LEU)
7608 && mode != CCUNSmode)
7609 abort ();
7610
7611 if (mode != CCFPmode
7612 && (code == ORDERED || code == UNORDERED
7613 || code == UNEQ || code == LTGT
7614 || code == UNGT || code == UNLT
7615 || code == UNGE || code == UNLE))
7616 abort ();
7617
7618 /* These should never be generated except for
7619 flag_finite_math_only. */
7620 if (mode == CCFPmode
7621 && ! flag_finite_math_only
7622 && (code == LE || code == GE
7623 || code == UNEQ || code == LTGT
7624 || code == UNGT || code == UNLT))
7625 abort ();
7626
7627 /* These are invalid; the information is not there. */
7628 if (mode == CCEQmode
7629 && code != EQ && code != NE)
7630 abort ();
7631 }
7632
7633 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
7634 We only check the opcode against the mode of the CC value here. */
7635
7636 int
7637 branch_comparison_operator (op, mode)
7638 rtx op;
7639 enum machine_mode mode ATTRIBUTE_UNUSED;
7640 {
7641 enum rtx_code code = GET_CODE (op);
7642 enum machine_mode cc_mode;
7643
7644 if (GET_RTX_CLASS (code) != '<')
7645 return 0;
7646
7647 cc_mode = GET_MODE (XEXP (op, 0));
7648 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7649 return 0;
7650
7651 validate_condition_mode (code, cc_mode);
7652
7653 return 1;
7654 }
7655
7656 /* Return 1 if OP is a comparison operation that is valid for a branch
7657 insn and which is true if the corresponding bit in the CC register
7658 is set. */
7659
7660 int
7661 branch_positive_comparison_operator (op, mode)
7662 rtx op;
7663 enum machine_mode mode;
7664 {
7665 enum rtx_code code;
7666
7667 if (! branch_comparison_operator (op, mode))
7668 return 0;
7669
7670 code = GET_CODE (op);
7671 return (code == EQ || code == LT || code == GT
7672 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
7673 || code == LTU || code == GTU
7674 || code == UNORDERED);
7675 }
7676
7677 /* Return 1 if OP is a comparison operation that is valid for an scc
7678 insn: it must be a positive comparison. */
7679
7680 int
7681 scc_comparison_operator (op, mode)
7682 rtx op;
7683 enum machine_mode mode;
7684 {
7685 return branch_positive_comparison_operator (op, mode);
7686 }
7687
7688 int
7689 trap_comparison_operator (op, mode)
7690 rtx op;
7691 enum machine_mode mode;
7692 {
7693 if (mode != VOIDmode && mode != GET_MODE (op))
7694 return 0;
7695 return GET_RTX_CLASS (GET_CODE (op)) == '<';
7696 }
7697
7698 int
7699 boolean_operator (op, mode)
7700 rtx op;
7701 enum machine_mode mode ATTRIBUTE_UNUSED;
7702 {
7703 enum rtx_code code = GET_CODE (op);
7704 return (code == AND || code == IOR || code == XOR);
7705 }
7706
7707 int
7708 boolean_or_operator (op, mode)
7709 rtx op;
7710 enum machine_mode mode ATTRIBUTE_UNUSED;
7711 {
7712 enum rtx_code code = GET_CODE (op);
7713 return (code == IOR || code == XOR);
7714 }
7715
7716 int
7717 min_max_operator (op, mode)
7718 rtx op;
7719 enum machine_mode mode ATTRIBUTE_UNUSED;
7720 {
7721 enum rtx_code code = GET_CODE (op);
7722 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
7723 }
7724 \f
7725 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7726 mask required to convert the result of a rotate insn into a shift
7727 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
7728
7729 int
7730 includes_lshift_p (shiftop, andop)
7731 rtx shiftop;
7732 rtx andop;
7733 {
7734 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7735
7736 shift_mask <<= INTVAL (shiftop);
7737
7738 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7739 }
7740
7741 /* Similar, but for right shift. */
7742
7743 int
7744 includes_rshift_p (shiftop, andop)
7745 rtx shiftop;
7746 rtx andop;
7747 {
7748 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7749
7750 shift_mask >>= INTVAL (shiftop);
7751
7752 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7753 }
7754
7755 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
7756 to perform a left shift. It must have exactly SHIFTOP least
7757 significant 0's, then one or more 1's, then zero or more 0's. */
7758
int
includes_rldic_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      /* All-zero and all-one masks never qualify.  */
      if (c == 0 || c == ~0)
	return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* The 64-bit constant is split across two host words when
	 HOST_WIDE_INT is only 32 bits wide.  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      /* Reject all-zero and all-one masks, as in the CONST_INT case.  */
      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The mask lives entirely in the high word; redo the
	     CONST_INT analysis there with the shift reduced by 32.  */
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      /* The mask starts in the low word.  */
      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      /* Look for the 1->0 transition, which may cross into the high
	 word.  */
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  lsb = high & -high;
	  return high == -lsb;
	}

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
7849
7850 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7851 to perform a left shift. It must have SHIFTOP or more least
7852 significant 0's, with the remainder of the word 1's. */
7853
int
includes_rldicr_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
	 This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* The 64-bit constant is split across two host words when
	 HOST_WIDE_INT is only 32 bits wide.  */
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  if (low == 0)
	    {
	      /* The mask lies entirely in the high word: apply the
		 CONST_INT analysis there with the shift reduced
		 by 32.  */
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }
	  /* With bits in the low word, the high word must be all ones
	     for the mask shape to qualify.  */
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if ((lsb & shift_mask_low) == 0)
	return 0;

      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
7921
7922 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7923 for lfq and stfq insns.
7924
7925 Note reg1 and reg2 *must* be hard registers. To be sure we will
7926 abort if we are passed pseudo registers. */
7927
7928 int
7929 registers_ok_for_quad_peep (reg1, reg2)
7930 rtx reg1, reg2;
7931 {
7932 /* We might have been passed a SUBREG. */
7933 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7934 return 0;
7935
7936 return (REGNO (reg1) == REGNO (reg2) - 1);
7937 }
7938
7939 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7940 addr1 and addr2 must be in consecutive memory locations
7941 (addr2 == addr1 + 8). */
7942
7943 int
7944 addrs_ok_for_quad_peep (addr1, addr2)
7945 rtx addr1;
7946 rtx addr2;
7947 {
7948 unsigned int reg1;
7949 int offset1;
7950
7951 /* Extract an offset (if used) from the first addr. */
7952 if (GET_CODE (addr1) == PLUS)
7953 {
7954 /* If not a REG, return zero. */
7955 if (GET_CODE (XEXP (addr1, 0)) != REG)
7956 return 0;
7957 else
7958 {
7959 reg1 = REGNO (XEXP (addr1, 0));
7960 /* The offset must be constant! */
7961 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7962 return 0;
7963 offset1 = INTVAL (XEXP (addr1, 1));
7964 }
7965 }
7966 else if (GET_CODE (addr1) != REG)
7967 return 0;
7968 else
7969 {
7970 reg1 = REGNO (addr1);
7971 /* This was a simple (mem (reg)) expression. Offset is 0. */
7972 offset1 = 0;
7973 }
7974
7975 /* Make sure the second address is a (mem (plus (reg) (const_int)))
7976 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
7977 register as addr1. */
7978 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
7979 return 1;
7980 if (GET_CODE (addr2) != PLUS)
7981 return 0;
7982
7983 if (GET_CODE (XEXP (addr2, 0)) != REG
7984 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7985 return 0;
7986
7987 if (reg1 != REGNO (XEXP (addr2, 0)))
7988 return 0;
7989
7990 /* The offset for the second addr must be 8 more than the first addr. */
7991 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7992 return 0;
7993
7994 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7995 instructions. */
7996 return 1;
7997 }
7998 \f
7999 /* Return the register class of a scratch register needed to copy IN into
8000 or out of a register in CLASS in MODE. If it can be done directly,
8001 NO_REGS is returned. */
8002
enum reg_class
secondary_reload_class (class, mode, in)
     enum reg_class class;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx in;
{
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
#if TARGET_MACHO
		     && MACHOPIC_INDIRECT
#endif
		     ))
    {
      /* We cannot copy a symbolic operand directly into anything
	 other than BASE_REGS for TARGET_ELF.  So indicate that a
	 register from BASE_REGS is needed as an intermediate
	 register.

	 On Darwin, pic addresses require a load from memory, which
	 needs a base register.  */
      if (class != BASE_REGS
	  && (GET_CODE (in) == SYMBOL_REF
	      || GET_CODE (in) == HIGH
	      || GET_CODE (in) == LABEL_REF
	      || GET_CODE (in) == CONST))
	return BASE_REGS;
    }

  /* Reduce IN to a hard register number, or -1 meaning "not a hard
     register": constants, memory, and unallocated pseudos.  */
  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  regno = true_regnum (in);
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = -1;
	}
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = -1;
    }
  else
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}
8075 \f
8076 /* Given a comparison operation, return the bit number in CCR to test. We
8077 know this is a valid comparison.
8078
8079 SCC_P is 1 if this is for an scc. That means that %D will have been
8080 used instead of %C, so the bits will be in different places.
8081
8082 Return -1 if OP isn't a valid comparison for some reason. */
8083
int
ccr_bit (op, scc_p)
     rtx op;
     int scc_p;
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;
  int cc_regnum;
  int base_bit;
  rtx reg;

  /* Non-comparisons have no CCR bit.  */
  if (GET_RTX_CLASS (code) != '<')
    return -1;

  reg = XEXP (op, 0);

  /* The comparison must test a CR register.  */
  if (GET_CODE (reg) != REG
      || ! CR_REGNO_P (REGNO (reg)))
    abort ();

  cc_mode = GET_MODE (reg);
  cc_regnum = REGNO (reg);
  /* Each CR field occupies four consecutive bits of the CCR.  */
  base_bit = 4 * (cc_regnum - CR0_REGNO);

  validate_condition_mode (code, cc_mode);

  /* When generating a sCOND operation, only positive conditions are
     allowed.  */
  if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
      && code != GTU && code != LTU)
    abort ();

  switch (code)
    {
    case NE:
      /* For E500 CCFP comparisons, NE uses bit 1 of the field (the
	 same bit GT uses below).  */
      if (TARGET_E500 && !TARGET_FPRS
	  && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
	return base_bit + 1;
      return scc_p ? base_bit + 3 : base_bit + 2;
    case EQ:
      if (TARGET_E500 && !TARGET_FPRS
	  && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
	return base_bit + 1;
      return base_bit + 2;
    case GT: case GTU: case UNLE:
      return base_bit + 1;
    case LT: case LTU: case UNGE:
      return base_bit;
    case ORDERED: case UNORDERED:
      return base_bit + 3;

    case GE: case GEU:
      /* If scc, we will have done a cror to put the bit in the
	 unordered position.  So test that bit.  For integer, this is ! LT
	 unless this is an scc insn.  */
      return scc_p ? base_bit + 3 : base_bit;

    case LE: case LEU:
      return scc_p ? base_bit + 3 : base_bit + 1;

    default:
      abort ();
    }
}
8148 \f
8149 /* Return the GOT register. */
8150
8151 struct rtx_def *
8152 rs6000_got_register (value)
8153 rtx value ATTRIBUTE_UNUSED;
8154 {
8155 /* The second flow pass currently (June 1999) can't update
8156 regs_ever_live without disturbing other parts of the compiler, so
8157 update it here to make the prolog/epilogue code happy. */
8158 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
8159 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
8160
8161 current_function_uses_pic_offset_table = 1;
8162
8163 return pic_offset_table_rtx;
8164 }
8165 \f
8166 /* Function to init struct machine_function.
8167 This will be called, via a pointer variable,
8168 from push_function_context. */
8169
8170 static struct machine_function *
8171 rs6000_init_machine_status ()
8172 {
8173 return ggc_alloc_cleared (sizeof (machine_function));
8174 }
8175 \f
8176 /* These macros test for integers and extract the low-order bits. */
8177 #define INT_P(X) \
8178 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
8179 && GET_MODE (X) == VOIDmode)
8180
8181 #define INT_LOWPART(X) \
8182 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8183
/* Extract the MB (mask-begin) field value for the 32-bit mask constant
   OP; used by the %m output modifier.  */

int
extract_MB (op)
     rtx op;
{
  int i;
  unsigned long val = INT_LOWPART (op);

  /* If the high bit is zero, the value is the first 1 bit we find
     from the left.  */
  if ((val & 0x80000000) == 0)
    {
      /* A mask with no bits set in the low 32 bits is malformed.  */
      if ((val & 0xffffffff) == 0)
	abort ();

      i = 1;
      while (((val <<= 1) & 0x80000000) == 0)
	++i;
      return i;
    }

  /* If the high bit is set and the low bit is not, or the mask is all
     1's, the value is zero.  */
  if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
    return 0;

  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
     from the right.  */
  i = 31;
  while (((val >>= 1) & 1) != 0)
    --i;

  return i;
}
8217
/* Extract the ME (mask-end) field value for the 32-bit mask constant
   OP; used by the %M output modifier.  */

int
extract_ME (op)
     rtx op;
{
  int i;
  unsigned long val = INT_LOWPART (op);

  /* If the low bit is zero, the value is the first 1 bit we find from
     the right.  */
  if ((val & 1) == 0)
    {
      /* A mask with no bits set in the low 32 bits is malformed.  */
      if ((val & 0xffffffff) == 0)
	abort ();

      i = 30;
      while (((val >>= 1) & 1) == 0)
	--i;

      return i;
    }

  /* If the low bit is set and the high bit is not, or the mask is all
     1's, the value is 31.  */
  if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
    return 31;

  /* Otherwise we have a wrap-around mask.  Look for the first 0 bit
     from the left.  */
  i = 0;
  while (((val <<= 1) & 0x80000000) != 0)
    ++i;

  return i;
}
8252
8253 /* Locate some local-dynamic symbol still in use by this function
8254 so that we can print its name in some tls_ld pattern. */
8255
8256 static const char *
8257 rs6000_get_some_local_dynamic_name ()
8258 {
8259 rtx insn;
8260
8261 if (cfun->machine->some_ld_name)
8262 return cfun->machine->some_ld_name;
8263
8264 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8265 if (INSN_P (insn)
8266 && for_each_rtx (&PATTERN (insn),
8267 rs6000_get_some_local_dynamic_name_1, 0))
8268 return cfun->machine->some_ld_name;
8269
8270 abort ();
8271 }
8272
8273 /* Helper function for rs6000_get_some_local_dynamic_name. */
8274
8275 static int
8276 rs6000_get_some_local_dynamic_name_1 (px, data)
8277 rtx *px;
8278 void *data ATTRIBUTE_UNUSED;
8279 {
8280 rtx x = *px;
8281
8282 if (GET_CODE (x) == SYMBOL_REF)
8283 {
8284 const char *str = XSTR (x, 0);
8285 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8286 {
8287 cfun->machine->some_ld_name = str;
8288 return 1;
8289 }
8290 }
8291
8292 return 0;
8293 }
8294
8295 /* Print an operand. Recognize special options, documented below. */
8296
8297 #if TARGET_ELF
8298 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8299 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
8300 #else
8301 #define SMALL_DATA_RELOC "sda21"
8302 #define SMALL_DATA_REG 0
8303 #endif
8304
8305 void
8306 print_operand (file, x, code)
8307 FILE *file;
8308 rtx x;
8309 int code;
8310 {
8311 int i;
8312 HOST_WIDE_INT val;
8313 unsigned HOST_WIDE_INT uval;
8314
8315 switch (code)
8316 {
8317 case '.':
8318 /* Write out an instruction after the call which may be replaced
8319 with glue code by the loader. This depends on the AIX version. */
8320 asm_fprintf (file, RS6000_CALL_GLUE);
8321 return;
8322
8323 /* %a is output_address. */
8324
8325 case 'A':
8326 /* If X is a constant integer whose low-order 5 bits are zero,
8327 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
8328 in the AIX assembler where "sri" with a zero shift count
8329 writes a trash instruction. */
8330 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
8331 putc ('l', file);
8332 else
8333 putc ('r', file);
8334 return;
8335
8336 case 'b':
8337 /* If constant, low-order 16 bits of constant, unsigned.
8338 Otherwise, write normally. */
8339 if (INT_P (x))
8340 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
8341 else
8342 print_operand (file, x, 0);
8343 return;
8344
8345 case 'B':
8346 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
8347 for 64-bit mask direction. */
8348 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
8349 return;
8350
8351 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
8352 output_operand. */
8353
8354 case 'E':
8355 /* X is a CR register. Print the number of the EQ bit of the CR */
8356 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8357 output_operand_lossage ("invalid %%E value");
8358 else
8359 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
8360 return;
8361
8362 case 'f':
8363 /* X is a CR register. Print the shift count needed to move it
8364 to the high-order four bits. */
8365 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8366 output_operand_lossage ("invalid %%f value");
8367 else
8368 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
8369 return;
8370
8371 case 'F':
8372 /* Similar, but print the count for the rotate in the opposite
8373 direction. */
8374 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8375 output_operand_lossage ("invalid %%F value");
8376 else
8377 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
8378 return;
8379
8380 case 'G':
8381 /* X is a constant integer. If it is negative, print "m",
8382 otherwise print "z". This is to make an aze or ame insn. */
8383 if (GET_CODE (x) != CONST_INT)
8384 output_operand_lossage ("invalid %%G value");
8385 else if (INTVAL (x) >= 0)
8386 putc ('z', file);
8387 else
8388 putc ('m', file);
8389 return;
8390
8391 case 'h':
8392 /* If constant, output low-order five bits. Otherwise, write
8393 normally. */
8394 if (INT_P (x))
8395 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
8396 else
8397 print_operand (file, x, 0);
8398 return;
8399
8400 case 'H':
8401 /* If constant, output low-order six bits. Otherwise, write
8402 normally. */
8403 if (INT_P (x))
8404 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
8405 else
8406 print_operand (file, x, 0);
8407 return;
8408
8409 case 'I':
8410 /* Print `i' if this is a constant, else nothing. */
8411 if (INT_P (x))
8412 putc ('i', file);
8413 return;
8414
8415 case 'j':
8416 /* Write the bit number in CCR for jump. */
8417 i = ccr_bit (x, 0);
8418 if (i == -1)
8419 output_operand_lossage ("invalid %%j code");
8420 else
8421 fprintf (file, "%d", i);
8422 return;
8423
8424 case 'J':
8425 /* Similar, but add one for shift count in rlinm for scc and pass
8426 scc flag to `ccr_bit'. */
8427 i = ccr_bit (x, 1);
8428 if (i == -1)
8429 output_operand_lossage ("invalid %%J code");
8430 else
8431 /* If we want bit 31, write a shift count of zero, not 32. */
8432 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8433 return;
8434
8435 case 'k':
8436 /* X must be a constant. Write the 1's complement of the
8437 constant. */
8438 if (! INT_P (x))
8439 output_operand_lossage ("invalid %%k value");
8440 else
8441 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
8442 return;
8443
8444 case 'K':
8445 /* X must be a symbolic constant on ELF. Write an
8446 expression suitable for an 'addi' that adds in the low 16
8447 bits of the MEM. */
8448 if (GET_CODE (x) != CONST)
8449 {
8450 print_operand_address (file, x);
8451 fputs ("@l", file);
8452 }
8453 else
8454 {
8455 if (GET_CODE (XEXP (x, 0)) != PLUS
8456 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
8457 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
8458 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
8459 output_operand_lossage ("invalid %%K value");
8460 print_operand_address (file, XEXP (XEXP (x, 0), 0));
8461 fputs ("@l", file);
8462 /* For GNU as, there must be a non-alphanumeric character
8463 between 'l' and the number. The '-' is added by
8464 print_operand() already. */
8465 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
8466 fputs ("+", file);
8467 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
8468 }
8469 return;
8470
8471 /* %l is output_asm_label. */
8472
8473 case 'L':
8474 /* Write second word of DImode or DFmode reference. Works on register
8475 or non-indexed memory only. */
8476 if (GET_CODE (x) == REG)
8477 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
8478 else if (GET_CODE (x) == MEM)
8479 {
8480 /* Handle possible auto-increment. Since it is pre-increment and
8481 we have already done it, we can just use an offset of word. */
8482 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8483 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8484 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
8485 UNITS_PER_WORD));
8486 else
8487 output_address (XEXP (adjust_address_nv (x, SImode,
8488 UNITS_PER_WORD),
8489 0));
8490
8491 if (small_data_operand (x, GET_MODE (x)))
8492 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8493 reg_names[SMALL_DATA_REG]);
8494 }
8495 return;
8496
8497 case 'm':
8498 /* MB value for a mask operand. */
8499 if (! mask_operand (x, SImode))
8500 output_operand_lossage ("invalid %%m value");
8501
8502 fprintf (file, "%d", extract_MB (x));
8503 return;
8504
8505 case 'M':
8506 /* ME value for a mask operand. */
8507 if (! mask_operand (x, SImode))
8508 output_operand_lossage ("invalid %%M value");
8509
8510 fprintf (file, "%d", extract_ME (x));
8511 return;
8512
8513 /* %n outputs the negative of its operand. */
8514
8515 case 'N':
8516 /* Write the number of elements in the vector times 4. */
8517 if (GET_CODE (x) != PARALLEL)
8518 output_operand_lossage ("invalid %%N value");
8519 else
8520 fprintf (file, "%d", XVECLEN (x, 0) * 4);
8521 return;
8522
8523 case 'O':
8524 /* Similar, but subtract 1 first. */
8525 if (GET_CODE (x) != PARALLEL)
8526 output_operand_lossage ("invalid %%O value");
8527 else
8528 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
8529 return;
8530
8531 case 'p':
8532 /* X is a CONST_INT that is a power of two. Output the logarithm. */
8533 if (! INT_P (x)
8534 || INT_LOWPART (x) < 0
8535 || (i = exact_log2 (INT_LOWPART (x))) < 0)
8536 output_operand_lossage ("invalid %%p value");
8537 else
8538 fprintf (file, "%d", i);
8539 return;
8540
8541 case 'P':
8542 /* The operand must be an indirect memory reference. The result
8543 is the register number. */
8544 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
8545 || REGNO (XEXP (x, 0)) >= 32)
8546 output_operand_lossage ("invalid %%P value");
8547 else
8548 fprintf (file, "%d", REGNO (XEXP (x, 0)));
8549 return;
8550
8551 case 'q':
8552 /* This outputs the logical code corresponding to a boolean
8553 expression. The expression may have one or both operands
8554 negated (if one, only the first one). For condition register
8555 logical operations, it will also treat the negated
8556 CR codes as NOTs, but not handle NOTs of them. */
8557 {
8558 const char *const *t = 0;
8559 const char *s;
8560 enum rtx_code code = GET_CODE (x);
8561 static const char * const tbl[3][3] = {
8562 { "and", "andc", "nor" },
8563 { "or", "orc", "nand" },
8564 { "xor", "eqv", "xor" } };
8565
8566 if (code == AND)
8567 t = tbl[0];
8568 else if (code == IOR)
8569 t = tbl[1];
8570 else if (code == XOR)
8571 t = tbl[2];
8572 else
8573 output_operand_lossage ("invalid %%q value");
8574
8575 if (GET_CODE (XEXP (x, 0)) != NOT)
8576 s = t[0];
8577 else
8578 {
8579 if (GET_CODE (XEXP (x, 1)) == NOT)
8580 s = t[2];
8581 else
8582 s = t[1];
8583 }
8584
8585 fputs (s, file);
8586 }
8587 return;
8588
8589 case 'Q':
8590 if (TARGET_MFCRF)
8591 fputc (',',file);
8592 /* FALLTHRU */
8593 else
8594 return;
8595
8596 case 'R':
8597 /* X is a CR register. Print the mask for `mtcrf'. */
8598 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8599 output_operand_lossage ("invalid %%R value");
8600 else
8601 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
8602 return;
8603
8604 case 's':
8605 /* Low 5 bits of 32 - value */
8606 if (! INT_P (x))
8607 output_operand_lossage ("invalid %%s value");
8608 else
8609 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
8610 return;
8611
8612 case 'S':
8613 /* PowerPC64 mask position. All 0's is excluded.
8614 CONST_INT 32-bit mask is considered sign-extended so any
8615 transition must occur within the CONST_INT, not on the boundary. */
8616 if (! mask64_operand (x, DImode))
8617 output_operand_lossage ("invalid %%S value");
8618
8619 uval = INT_LOWPART (x);
8620
8621 if (uval & 1) /* Clear Left */
8622 {
8623 #if HOST_BITS_PER_WIDE_INT > 64
8624 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8625 #endif
8626 i = 64;
8627 }
8628 else /* Clear Right */
8629 {
8630 uval = ~uval;
8631 #if HOST_BITS_PER_WIDE_INT > 64
8632 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8633 #endif
8634 i = 63;
8635 }
8636 while (uval != 0)
8637 --i, uval >>= 1;
8638 if (i < 0)
8639 abort ();
8640 fprintf (file, "%d", i);
8641 return;
8642
8643 case 't':
8644 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
8645 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
8646 abort ();
8647
8648 /* Bit 3 is OV bit. */
8649 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
8650
8651 /* If we want bit 31, write a shift count of zero, not 32. */
8652 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8653 return;
8654
8655 case 'T':
8656 /* Print the symbolic name of a branch target register. */
8657 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
8658 && REGNO (x) != COUNT_REGISTER_REGNUM))
8659 output_operand_lossage ("invalid %%T value");
8660 else if (REGNO (x) == LINK_REGISTER_REGNUM)
8661 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
8662 else
8663 fputs ("ctr", file);
8664 return;
8665
8666 case 'u':
8667 /* High-order 16 bits of constant for use in unsigned operand. */
8668 if (! INT_P (x))
8669 output_operand_lossage ("invalid %%u value");
8670 else
8671 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8672 (INT_LOWPART (x) >> 16) & 0xffff);
8673 return;
8674
8675 case 'v':
8676 /* High-order 16 bits of constant for use in signed operand. */
8677 if (! INT_P (x))
8678 output_operand_lossage ("invalid %%v value");
8679 else
8680 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8681 (INT_LOWPART (x) >> 16) & 0xffff);
8682 return;
8683
8684 case 'U':
8685 /* Print `u' if this has an auto-increment or auto-decrement. */
8686 if (GET_CODE (x) == MEM
8687 && (GET_CODE (XEXP (x, 0)) == PRE_INC
8688 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
8689 putc ('u', file);
8690 return;
8691
8692 case 'V':
8693 /* Print the trap code for this operand. */
8694 switch (GET_CODE (x))
8695 {
8696 case EQ:
8697 fputs ("eq", file); /* 4 */
8698 break;
8699 case NE:
8700 fputs ("ne", file); /* 24 */
8701 break;
8702 case LT:
8703 fputs ("lt", file); /* 16 */
8704 break;
8705 case LE:
8706 fputs ("le", file); /* 20 */
8707 break;
8708 case GT:
8709 fputs ("gt", file); /* 8 */
8710 break;
8711 case GE:
8712 fputs ("ge", file); /* 12 */
8713 break;
8714 case LTU:
8715 fputs ("llt", file); /* 2 */
8716 break;
8717 case LEU:
8718 fputs ("lle", file); /* 6 */
8719 break;
8720 case GTU:
8721 fputs ("lgt", file); /* 1 */
8722 break;
8723 case GEU:
8724 fputs ("lge", file); /* 5 */
8725 break;
8726 default:
8727 abort ();
8728 }
8729 break;
8730
8731 case 'w':
8732 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
8733 normally. */
8734 if (INT_P (x))
8735 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
8736 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
8737 else
8738 print_operand (file, x, 0);
8739 return;
8740
8741 case 'W':
8742 /* MB value for a PowerPC64 rldic operand. */
8743 val = (GET_CODE (x) == CONST_INT
8744 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
8745
8746 if (val < 0)
8747 i = -1;
8748 else
8749 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
8750 if ((val <<= 1) < 0)
8751 break;
8752
8753 #if HOST_BITS_PER_WIDE_INT == 32
8754 if (GET_CODE (x) == CONST_INT && i >= 0)
8755 i += 32; /* zero-extend high-part was all 0's */
8756 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
8757 {
8758 val = CONST_DOUBLE_LOW (x);
8759
8760 if (val == 0)
8761 abort ();
8762 else if (val < 0)
8763 --i;
8764 else
8765 for ( ; i < 64; i++)
8766 if ((val <<= 1) < 0)
8767 break;
8768 }
8769 #endif
8770
8771 fprintf (file, "%d", i + 1);
8772 return;
8773
8774 case 'X':
8775 if (GET_CODE (x) == MEM
8776 && legitimate_indexed_address_p (XEXP (x, 0), 0))
8777 putc ('x', file);
8778 return;
8779
8780 case 'Y':
8781 /* Like 'L', for third word of TImode */
8782 if (GET_CODE (x) == REG)
8783 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
8784 else if (GET_CODE (x) == MEM)
8785 {
8786 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8787 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8788 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
8789 else
8790 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
8791 if (small_data_operand (x, GET_MODE (x)))
8792 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8793 reg_names[SMALL_DATA_REG]);
8794 }
8795 return;
8796
8797 case 'z':
8798 /* X is a SYMBOL_REF. Write out the name preceded by a
8799 period and without any trailing data in brackets. Used for function
8800 names. If we are configured for System V (or the embedded ABI) on
8801 the PowerPC, do not emit the period, since those systems do not use
8802 TOCs and the like. */
8803 if (GET_CODE (x) != SYMBOL_REF)
8804 abort ();
8805
8806 if (XSTR (x, 0)[0] != '.')
8807 {
8808 switch (DEFAULT_ABI)
8809 {
8810 default:
8811 abort ();
8812
8813 case ABI_AIX:
8814 putc ('.', file);
8815 break;
8816
8817 case ABI_V4:
8818 case ABI_DARWIN:
8819 break;
8820 }
8821 }
8822 if (TARGET_AIX)
8823 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
8824 else
8825 assemble_name (file, XSTR (x, 0));
8826 return;
8827
8828 case 'Z':
8829 /* Like 'L', for last word of TImode. */
8830 if (GET_CODE (x) == REG)
8831 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
8832 else if (GET_CODE (x) == MEM)
8833 {
8834 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8835 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8836 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
8837 else
8838 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
8839 if (small_data_operand (x, GET_MODE (x)))
8840 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8841 reg_names[SMALL_DATA_REG]);
8842 }
8843 return;
8844
8845 /* Print AltiVec or SPE memory operand. */
8846 case 'y':
8847 {
8848 rtx tmp;
8849
8850 if (GET_CODE (x) != MEM)
8851 abort ();
8852
8853 tmp = XEXP (x, 0);
8854
8855 if (TARGET_E500)
8856 {
8857 /* Handle [reg]. */
8858 if (GET_CODE (tmp) == REG)
8859 {
8860 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
8861 break;
8862 }
8863 /* Handle [reg+UIMM]. */
8864 else if (GET_CODE (tmp) == PLUS &&
8865 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
8866 {
8867 int x;
8868
8869 if (GET_CODE (XEXP (tmp, 0)) != REG)
8870 abort ();
8871
8872 x = INTVAL (XEXP (tmp, 1));
8873 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
8874 break;
8875 }
8876
8877 /* Fall through. Must be [reg+reg]. */
8878 }
8879 if (GET_CODE (tmp) == REG)
8880 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
8881 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
8882 {
8883 if (REGNO (XEXP (tmp, 0)) == 0)
8884 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
8885 reg_names[ REGNO (XEXP (tmp, 0)) ]);
8886 else
8887 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
8888 reg_names[ REGNO (XEXP (tmp, 1)) ]);
8889 }
8890 else
8891 abort ();
8892 break;
8893 }
8894
8895 case 0:
8896 if (GET_CODE (x) == REG)
8897 fprintf (file, "%s", reg_names[REGNO (x)]);
8898 else if (GET_CODE (x) == MEM)
8899 {
8900 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8901 know the width from the mode. */
8902 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
8903 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
8904 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8905 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
8906 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
8907 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8908 else
8909 output_address (XEXP (x, 0));
8910 }
8911 else
8912 output_addr_const (file, x);
8913 return;
8914
8915 case '&':
8916 assemble_name (file, rs6000_get_some_local_dynamic_name ());
8917 return;
8918
8919 default:
8920 output_operand_lossage ("invalid %%xn code");
8921 }
8922 }
8923 \f
/* Print the address of an operand (typically a MEM address X) to FILE
   in assembler syntax.  Aborts on address forms that should not reach
   final output.  */

void
print_operand_address (file, x)
     FILE *file;
     rtx x;
{
  if (GET_CODE (x) == REG)
    /* Bare register: print as a zero-displacement indirect address.  */
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	/* Small-data symbols are addressed via the small-data base
	   register with the small-data relocation suffix.  */
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      else if (TARGET_TOC)
	/* With a TOC, a bare symbolic address should have been
	   rewritten as a TOC reference before now.  */
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* Indexed form "rA,rB".  r0 in the base position means the
	 constant zero, so if the base is r0 put it second.  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    /* Register plus constant displacement: "D(rA)".  */
    fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
	     INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      /* ELF spelling for the low 16 bits of a symbol: "sym@l(rA)".  */
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      /* Darwin spelling of the same thing: "lo16(sym)(rA)".  */
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (legitimate_constant_pool_address_p (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  The RTL is
	     mutated in place and restored below.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      /* On ELF, append "@toc" to the symbol name while it is
		 output, then restore the original name afterwards.  */
	      char *newname;

	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  /* Put the shared RTL back the way we found it.  */
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
9011 \f
/* Target hook for assembling integer objects.  The PowerPC version has
   to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
   is defined.  It also needs to handle DI-mode objects on 64-bit
   targets.  Returns true if the value was emitted here, false to fall
   back to the default handling.  */

static bool
rs6000_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section PARAMS ((void));
      /* Guard against re-entering this path while we emit the fixup
	 entry itself (assemble_name below may recurse into us).  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  /* Label the word so the fixup entry can point at it.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  /* Record the label's address in the .fixup section, then
	     switch back to the previous section.  */
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  return default_assemble_integer (x, size, aligned_p);
}
9075
9076 #ifdef HAVE_GAS_HIDDEN
9077 /* Emit an assembler directive to set symbol visibility for DECL to
9078 VISIBILITY_TYPE. */
9079
9080 static void
9081 rs6000_assemble_visibility (decl, vis)
9082 tree decl;
9083 int vis;
9084 {
9085 /* Functions need to have their entry point symbol visibility set as
9086 well as their descriptor symbol visibility. */
9087 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
9088 {
9089 static const char * const visibility_types[] = {
9090 NULL, "internal", "hidden", "protected"
9091 };
9092
9093 const char *name, *type;
9094
9095 name = ((* targetm.strip_name_encoding)
9096 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
9097 type = visibility_types[vis];
9098
9099 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
9100 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
9101 }
9102 else
9103 default_assemble_visibility (decl, vis);
9104 }
9105 #endif
9106 \f
9107 enum rtx_code
9108 rs6000_reverse_condition (mode, code)
9109 enum machine_mode mode;
9110 enum rtx_code code;
9111 {
9112 /* Reversal of FP compares takes care -- an ordered compare
9113 becomes an unordered compare and vice versa. */
9114 if (mode == CCFPmode
9115 && (!flag_finite_math_only
9116 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
9117 || code == UNEQ || code == LTGT))
9118 return reverse_condition_maybe_unordered (code);
9119 else
9120 return reverse_condition (code);
9121 }
9122
/* Generate a compare for CODE.  Return a brand-new rtx that
   represents the result of the compare.  The values compared are the
   globals rs6000_compare_op0 and rs6000_compare_op1; emits the compare
   insn(s) as a side effect.  */

static rtx
rs6000_generate_compare (code)
     enum rtx_code code;
{
  enum machine_mode comp_mode;
  rtx compare_result;

  /* Pick the condition-register mode: CCFP for FP compares, CCUNS
     for unsigned integer compares, plain CC otherwise.  */
  if (rs6000_compare_fp_p)
    comp_mode = CCFPmode;
  else if (code == GTU || code == LTU
	   || code == GEU || code == LEU)
    comp_mode = CCUNSmode;
  else
    comp_mode = CCmode;

  /* First, the compare.  */
  compare_result = gen_reg_rtx (comp_mode);

  /* SPE FP compare instructions on the GPRs.  Yuck!  */
  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
      && rs6000_compare_fp_p)
    {
      rtx cmp, or1, or2, or_result, compare_result2;

      /* SPE only has eq/gt/lt tests, so map the requested comparison
	 onto one of those three (LE/GE are synthesized below).  */
      switch (code)
	{
	case EQ:
	case UNEQ:
	case NE:
	case LTGT:
	  cmp = flag_finite_math_only
	    ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	case GT:
	case GTU:
	case UNGT:
	case UNGE:
	case GE:
	case GEU:
	  cmp = flag_finite_math_only
	    ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	case LT:
	case LTU:
	case UNLT:
	case UNLE:
	case LE:
	case LEU:
	  cmp = flag_finite_math_only
	    ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
			       rs6000_compare_op1);
	  break;
	default:
	  abort ();
	}

      /* Synthesize LE and GE from LT/GT || EQ.  */
      if (code == LE || code == GE || code == LEU || code == GEU)
	{
	  /* Emit the LT/GT compare chosen above, then OR it with an
	     EQ compare of the same operands.  */
	  emit_insn (cmp);

	  switch (code)
	    {
	    case LE: code = LT; break;
	    case GE: code = GT; break;
	    case LEU: code = LT; break;
	    case GEU: code = GT; break;
	    default: abort ();
	    }

	  or1 = gen_reg_rtx (SImode);
	  or2 = gen_reg_rtx (SImode);
	  or_result = gen_reg_rtx (CCEQmode);
	  compare_result2 = gen_reg_rtx (CCFPmode);

	  /* Do the EQ.  */
	  cmp = flag_finite_math_only
	    ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
			       rs6000_compare_op1)
	    : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
			       rs6000_compare_op1);
	  emit_insn (cmp);

	  /* The MC8540 FP compare instructions set the CR bits
	     differently than other PPC compare instructions.  For
	     that matter, there is no generic test instruction, but a
	     testgt, testlt, and testeq.  For a true condition, bit 2
	     is set (x1xx) in the CR.  Following the traditional CR
	     values:

	     LT    GT    EQ    OV
	     bit3  bit2  bit1  bit0

	     ... bit 2 would be a GT CR alias, so later on we
	     look in the GT bits for the branch instructions.
	     However, we must be careful to emit correct RTL in
	     the meantime, so optimizations don't get confused.  */

	  or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
	  or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);

	  /* OR them together.  */
	  cmp = gen_rtx_SET (VOIDmode, or_result,
			     gen_rtx_COMPARE (CCEQmode,
					      gen_rtx_IOR (SImode, or1, or2),
					      const_true_rtx));
	  compare_result = or_result;
	  code = EQ;
	}
      else
	{
	  /* We only care about 1 bit (x1xx), so map everything to NE to
	     maintain rtl sanity.  We'll get to the right bit (x1xx) at
	     code output time.  */
	  if (code == NE || code == LTGT)
	    /* Do the inverse here because we have no cmpne
	       instruction.  We use the cmpeq instruction and expect
	       to get a 0 instead.  */
	    code = EQ;
	  else
	    code = NE;
	}

      emit_insn (cmp);
    }
  else
    /* The normal case: a single compare insn setting a CC register.  */
    emit_insn (gen_rtx_SET (VOIDmode, compare_result,
			    gen_rtx_COMPARE (comp_mode,
					     rs6000_compare_op0,
					     rs6000_compare_op1)));

  /* Some kinds of FP comparisons need an OR operation;
     under flag_finite_math_only we don't bother.  */
  if (rs6000_compare_fp_p
      && ! flag_finite_math_only
      && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
      && (code == LE || code == GE
	  || code == UNEQ || code == LTGT
	  || code == UNGT || code == UNLT))
    {
      /* Split the compound condition into two primitive conditions
	 and OR their CR bits into a CCEQ result.  */
      enum rtx_code or1, or2;
      rtx or1_rtx, or2_rtx, compare2_rtx;
      rtx or_result = gen_reg_rtx (CCEQmode);

      switch (code)
	{
	case LE: or1 = LT;  or2 = EQ;  break;
	case GE: or1 = GT;  or2 = EQ;  break;
	case UNEQ: or1 = UNORDERED;  or2 = EQ;  break;
	case LTGT: or1 = LT;  or2 = GT;  break;
	case UNGT: or1 = UNORDERED;  or2 = GT;  break;
	case UNLT: or1 = UNORDERED;  or2 = LT;  break;
	default: abort ();
	}
      validate_condition_mode (or1, comp_mode);
      validate_condition_mode (or2, comp_mode);
      or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
      or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
      compare2_rtx = gen_rtx_COMPARE (CCEQmode,
				      gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
				      const_true_rtx);
      emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));

      compare_result = or_result;
      code = EQ;
    }

  validate_condition_mode (code, GET_MODE (compare_result));

  /* Return a CODE comparison of the CC register against zero; the
     caller uses this in a branch, store-condition or conditional
     move.  */
  return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
}
9307
9308
9309 /* Emit the RTL for an sCOND pattern. */
9310
9311 void
9312 rs6000_emit_sCOND (code, result)
9313 enum rtx_code code;
9314 rtx result;
9315 {
9316 rtx condition_rtx;
9317 enum machine_mode op_mode;
9318 enum rtx_code cond_code;
9319
9320 condition_rtx = rs6000_generate_compare (code);
9321 cond_code = GET_CODE (condition_rtx);
9322
9323 if (cond_code == NE
9324 || cond_code == GE || cond_code == LE
9325 || cond_code == GEU || cond_code == LEU
9326 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
9327 {
9328 rtx not_result = gen_reg_rtx (CCEQmode);
9329 rtx not_op, rev_cond_rtx;
9330 enum machine_mode cc_mode;
9331
9332 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
9333
9334 rev_cond_rtx = gen_rtx (rs6000_reverse_condition (cc_mode, cond_code),
9335 SImode, XEXP (condition_rtx, 0), const0_rtx);
9336 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
9337 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
9338 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
9339 }
9340
9341 op_mode = GET_MODE (rs6000_compare_op0);
9342 if (op_mode == VOIDmode)
9343 op_mode = GET_MODE (rs6000_compare_op1);
9344
9345 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
9346 {
9347 PUT_MODE (condition_rtx, DImode);
9348 convert_move (result, condition_rtx, 0);
9349 }
9350 else
9351 {
9352 PUT_MODE (condition_rtx, SImode);
9353 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
9354 }
9355 }
9356
9357 /* Emit a branch of kind CODE to location LOC. */
9358
9359 void
9360 rs6000_emit_cbranch (code, loc)
9361 enum rtx_code code;
9362 rtx loc;
9363 {
9364 rtx condition_rtx, loc_ref;
9365
9366 condition_rtx = rs6000_generate_compare (code);
9367 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
9368 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
9369 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
9370 loc_ref, pc_rtx)));
9371 }
9372
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, or -1 if the branch is really a
   conditional return.

   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
   condition code register and its mode specifies what kind of
   comparison we made.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.

   NOTE: the result points into a static buffer and is only valid
   until the next call.  */

char *
output_cbranch (op, label, reversed, insn)
     rtx op;
     const char * label;
     int reversed;
     rtx insn;
{
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* Length 8 means the branch target is out of conditional-branch
     range; we emit an inverted short branch around an unconditional
     one.  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    {
      /* Reversal of FP compares takes care -- an ordered compare
	 becomes an unordered compare and vice versa.  */
      if (mode == CCFPmode)
	code = reverse_condition_maybe_unordered (code);
      else
	code = reverse_condition (code);
    }

  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
    {
      /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
	 to the GT bit.  */
      if (code == EQ)
	/* Opposite of GT.  */
	code = UNLE;
      else if (code == NE)
	code = GT;
      else
	abort ();
    }

  /* Map the comparison code to the condition mnemonic suffix.  */
  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      abort ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  pred = "";
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
      bool always_hint = rs6000_cpu != PROCESSOR_POWER4;

      /* Only hint for highly probable/improbable branches on newer
	 cpus as static prediction overrides processor dynamic
	 prediction.  For older cpus we may as well always hint, but
	 assume not taken for branches that are very close to 50% as a
	 mispredicted taken branch is more expensive than a
	 mispredicted not-taken branch.  */
      if (always_hint
	  || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
	{
	  /* If this is a long branch, the hint applies to the
	     inverted short branch, hence the XOR with
	     need_longbranch.  */
	  if (abs (prob) > REG_BR_PROB_BASE / 20
	      && ((prob > 0) ^ need_longbranch))
	    pred = "+";
	  else
	    pred = "-";
	}
    }

  /* NULL label means a conditional return (blr form).  The {old|new}
     mnemonic pairs are selected by the assembler dialect.  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character...  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
9506
/* Emit a conditional move: move TRUE_COND to DEST if OP of the
   operands of the last comparison is nonzero/true, FALSE_COND if it
   is zero/false.  Return 0 if the hardware has no such operation.

   For floating-point operands this is implemented with fsel, which
   selects on "operand >= 0.0"; the code below rewrites the requested
   comparison into that form.  Integer operands are handled only when
   isel is available.  */

int
rs6000_emit_cmove (dest, op, true_cond, false_cond)
     rtx dest;
     rtx op;
     rtx true_cond;
     rtx false_cond;
{
  enum rtx_code code = GET_CODE (op);
  /* The values being compared were stashed in these globals by the
     earlier compare expansion.  */
  rtx op0 = rs6000_compare_op0;
  rtx op1 = rs6000_compare_op1;
  REAL_VALUE_TYPE c1;
  enum machine_mode compare_mode = GET_MODE (op0);
  enum machine_mode result_mode = GET_MODE (dest);
  rtx temp;

  /* These modes should always match.  */
  if (GET_MODE (op1) != compare_mode
      /* In the isel case however, we can use a compare immediate, so
	 op1 may be a small constant.  */
      && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
    return 0;
  if (GET_MODE (true_cond) != result_mode)
    return 0;
  if (GET_MODE (false_cond) != result_mode)
    return 0;

  /* First, work out if the hardware can do this at all, or
     if it's too slow...  */
  if (! rs6000_compare_fp_p)
    {
      /* Integer conditional moves exist only via isel.  */
      if (TARGET_ISEL)
	return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
      return 0;
    }

  /* Eliminate half of the comparisons by switching operands, this
     makes the remaining code simpler.  */
  if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
      || code == LTGT || code == LT || code == UNLE)
    {
      code = reverse_condition_maybe_unordered (code);
      temp = true_cond;
      true_cond = false_cond;
      false_cond = temp;
    }

  /* UNEQ and LTGT take four instructions for a comparison with zero,
     it'll probably be faster to use a branch here too.  */
  if (code == UNEQ && HONOR_NANS (compare_mode))
    return 0;

  /* c1 is only read below, and only when op1 is a CONST_DOUBLE.  */
  if (GET_CODE (op1) == CONST_DOUBLE)
    REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);

  /* We're going to try to implement comparisons by performing
     a subtract, then comparing against zero.  Unfortunately,
     Inf - Inf is NaN which is not zero, and so if we don't
     know that the operand is finite and the comparison
     would treat EQ different to UNORDERED, we can't do it.  */
  if (HONOR_INFINITIES (compare_mode)
      && code != GT && code != UNGE
      && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
      /* Constructs of the form (a OP b ? a : b) are safe.  */
      && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
	  || (! rtx_equal_p (op0, true_cond)
	      && ! rtx_equal_p (op1, true_cond))))
    return 0;
  /* At this point we know we can use fsel.  */

  /* Reduce the comparison to a comparison against zero.  */
  temp = gen_reg_rtx (compare_mode);
  emit_insn (gen_rtx_SET (VOIDmode, temp,
			  gen_rtx_MINUS (compare_mode, op0, op1)));
  op0 = temp;
  op1 = CONST0_RTX (compare_mode);

  /* If we don't care about NaNs we can reduce some of the comparisons
     down to faster ones.  */
  if (! HONOR_NANS (compare_mode))
    switch (code)
      {
      case GT:
	code = LE;
	temp = true_cond;
	true_cond = false_cond;
	false_cond = temp;
	break;
      case UNGE:
	code = GE;
	break;
      case UNEQ:
	code = EQ;
	break;
      default:
	break;
      }

  /* Now, reduce everything down to a GE.  */
  switch (code)
    {
    case GE:
      break;

    case LE:
      /* a LE 0 <-> -a GE 0.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case ORDERED:
      /* a ORDERED 0 <-> abs(a) GE 0; false only for NaN.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
      op0 = temp;
      break;

    case EQ:
      /* a EQ 0 <-> -abs(a) GE 0.  */
      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_NEG (compare_mode,
					   gen_rtx_ABS (compare_mode, op0))));
      op0 = temp;
      break;

    case UNGE:
      /* a UNGE 0 <-> (a GE 0 || -a UNLT 0); emit the first select,
	 then chain the second through the swapped arms.  */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_IF_THEN_ELSE (result_mode,
						    gen_rtx_GE (VOIDmode,
								op0, op1),
						    true_cond, false_cond)));
      false_cond = true_cond;
      true_cond = temp;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    case GT:
      /* a GT 0 <-> (a GE 0 && -a UNLT 0); same chaining idea as UNGE
	 with the arms swapped the other way.  */
      temp = gen_reg_rtx (result_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp,
			      gen_rtx_IF_THEN_ELSE (result_mode,
						    gen_rtx_GE (VOIDmode,
								op0, op1),
						    true_cond, false_cond)));
      true_cond = false_cond;
      false_cond = temp;

      temp = gen_reg_rtx (compare_mode);
      emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
      op0 = temp;
      break;

    default:
      abort ();
    }

  /* Emit the final fsel-shaped select: DEST = (op0 GE 0
     ? TRUE_COND : FALSE_COND).  */
  emit_insn (gen_rtx_SET (VOIDmode, dest,
			  gen_rtx_IF_THEN_ELSE (result_mode,
						gen_rtx_GE (VOIDmode,
							    op0, op1),
						true_cond, false_cond)));
  return 1;
}
9677
9678 /* Same as above, but for ints (isel). */
9679
9680 static int
9681 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
9682 rtx dest;
9683 rtx op;
9684 rtx true_cond;
9685 rtx false_cond;
9686 {
9687 rtx condition_rtx, cr;
9688
9689 /* All isel implementations thus far are 32-bits. */
9690 if (GET_MODE (rs6000_compare_op0) != SImode)
9691 return 0;
9692
9693 /* We still have to do the compare, because isel doesn't do a
9694 compare, it just looks at the CRx bits set by a previous compare
9695 instruction. */
9696 condition_rtx = rs6000_generate_compare (GET_CODE (op));
9697 cr = XEXP (condition_rtx, 0);
9698
9699 if (GET_MODE (cr) == CCmode)
9700 emit_insn (gen_isel_signed (dest, condition_rtx,
9701 true_cond, false_cond, cr));
9702 else
9703 emit_insn (gen_isel_unsigned (dest, condition_rtx,
9704 true_cond, false_cond, cr));
9705
9706 return 1;
9707 }
9708
9709 const char *
9710 output_isel (operands)
9711 rtx *operands;
9712 {
9713 enum rtx_code code;
9714
9715 code = GET_CODE (operands[1]);
9716 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
9717 {
9718 PUT_CODE (operands[1], reverse_condition (code));
9719 return "isel %0,%3,%2,%j1";
9720 }
9721 else
9722 return "isel %0,%2,%3,%j1";
9723 }
9724
9725 void
9726 rs6000_emit_minmax (dest, code, op0, op1)
9727 rtx dest;
9728 enum rtx_code code;
9729 rtx op0;
9730 rtx op1;
9731 {
9732 enum machine_mode mode = GET_MODE (op0);
9733 enum rtx_code c;
9734 rtx target;
9735
9736 if (code == SMAX || code == SMIN)
9737 c = GE;
9738 else
9739 c = GEU;
9740
9741 if (code == SMAX || code == UMAX)
9742 target = emit_conditional_move (dest, c, op0, op1, mode,
9743 op0, op1, mode, 0);
9744 else
9745 target = emit_conditional_move (dest, c, op0, op1, mode,
9746 op1, op0, mode, 0);
9747 if (target == NULL_RTX)
9748 abort ();
9749 if (target != dest)
9750 emit_move_insn (dest, target);
9751 }
9752
/* Called by altivec splitter.
   Input:
	operands[0] : Destination of move
	operands[1] : Source of move
	noperands   : Size of operands vector
   Output:
	operands[2-5] ([2-3] in 64 bit) : Destination slots
	operands[6-9] ([4-5] in 64 bit) : Source slots

   Splits the move of operands[1] to operands[0].
   This is done, if GPRs are one of the operands.  In this case
   a sequence of simple move insns has to be issued.  The sequence of these
   move insns has to be done in correct order to avoid early clobber of the
   base register or destructive overlap of registers.  */

void
rs6000_split_altivec_in_gprs (rtx *operands)
{
  int nregs, reg, i, j;
  enum machine_mode mode;

  /* Calculate number to move (2/4 for 32/64 bit mode).  */

  /* At least one operand is a GPR here; use its regno for the
     HARD_REGNO_NREGS query.  */
  reg = REG_P (operands[0]) ? REGNO (operands[0]) : REGNO (operands[1]);
  mode = GET_MODE (operands[0]);
  nregs = HARD_REGNO_NREGS (reg, mode);

  if (REG_P (operands[1])
      && REG_P (operands[0])
      && (REGNO (operands[1]) < REGNO (operands[0])))
    {
      /* Move register range backwards, if we have destructive overlap.
	 Copying high-to-low guarantees each source subword is read
	 before the overlapping destination subword clobbers it.  */

      j = nregs;
      for (i = 0; i < nregs; i++)
	{
	  j--;
	  operands[i + 2] = operand_subword (operands[0], j, 0, mode);
	  operands[i + 2 + nregs] =
	    operand_subword (operands[1], j, 0, mode);
	}
    }
  else
    {
      /* Forward order by default (j starts at -1, pre-incremented in
	 the loop).  If the source is a MEM addressed through one of
	 the destination registers, start just past that register so
	 the wrap-around ordering loads it last.  */
      j = -1;

      if (GET_CODE (operands[1]) == MEM)
	{
	  rtx breg;
	  /* We have offsettable addresses only.  If we use one of the
	     registers to address memory, we have change that register last.  */
	  breg = GET_CODE (XEXP (operands[1], 0)) == PLUS ?
	      XEXP (XEXP (operands[1], 0), 0) :
	      XEXP (operands[1], 0);

	  if (REGNO (breg) >= REGNO (operands[0])
	      && REGNO (breg) < REGNO (operands[0]) + nregs)
	    j = REGNO (breg) - REGNO (operands[0]);
	}

      for (i = 0; i < nregs; i++)
	{
	  /* Calculate index to next subword.  */
	  j++;
	  if (j == nregs)
	    j = 0;

	  operands[i + 2] = operand_subword (operands[0], j, 0, mode);
	  operands[i + 2 + nregs] =
	    operand_subword (operands[1], j, 0, mode);

	}
    }
}
9828
9829 \f
9830 /* This page contains routines that are used to determine what the
9831 function prologue and epilogue code will do and write them out. */
9832
9833 /* Return the first fixed-point register that is required to be
9834 saved. 32 if none. */
9835
9836 int
9837 first_reg_to_save ()
9838 {
9839 int first_reg;
9840
9841 /* Find lowest numbered live register. */
9842 for (first_reg = 13; first_reg <= 31; first_reg++)
9843 if (regs_ever_live[first_reg]
9844 && (! call_used_regs[first_reg]
9845 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
9846 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
9847 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
9848 break;
9849
9850 #if TARGET_MACHO
9851 if (flag_pic
9852 && current_function_uses_pic_offset_table
9853 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
9854 return RS6000_PIC_OFFSET_TABLE_REGNUM;
9855 #endif
9856
9857 return first_reg;
9858 }
9859
9860 /* Similar, for FP regs. */
9861
9862 int
9863 first_fp_reg_to_save ()
9864 {
9865 int first_reg;
9866
9867 /* Find lowest numbered live register. */
9868 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
9869 if (regs_ever_live[first_reg])
9870 break;
9871
9872 return first_reg;
9873 }
9874
9875 /* Similar, for AltiVec regs. */
9876
9877 static int
9878 first_altivec_reg_to_save ()
9879 {
9880 int i;
9881
9882 /* Stack frame remains as is unless we are in AltiVec ABI. */
9883 if (! TARGET_ALTIVEC_ABI)
9884 return LAST_ALTIVEC_REGNO + 1;
9885
9886 /* Find lowest numbered live register. */
9887 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
9888 if (regs_ever_live[i])
9889 break;
9890
9891 return i;
9892 }
9893
9894 /* Return a 32-bit mask of the AltiVec registers we need to set in
9895 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
9896 the 32-bit word is 0. */
9897
9898 static unsigned int
9899 compute_vrsave_mask ()
9900 {
9901 unsigned int i, mask = 0;
9902
9903 /* First, find out if we use _any_ altivec registers. */
9904 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
9905 if (regs_ever_live[i])
9906 mask |= ALTIVEC_REG_BIT (i);
9907
9908 if (mask == 0)
9909 return mask;
9910
9911 /* Next, remove the argument registers from the set. These must
9912 be in the VRSAVE mask set by the caller, so we don't need to add
9913 them in again. More importantly, the mask we compute here is
9914 used to generate CLOBBERs in the set_vrsave insn, and we do not
9915 wish the argument registers to die. */
9916 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
9917 mask &= ~ALTIVEC_REG_BIT (i);
9918
9919 /* Similarly, remove the return value from the set. */
9920 {
9921 bool yes = false;
9922 diddle_return_value (is_altivec_return_reg, &yes);
9923 if (yes)
9924 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
9925 }
9926
9927 return mask;
9928 }
9929
9930 static void
9931 is_altivec_return_reg (reg, xyes)
9932 rtx reg;
9933 void *xyes;
9934 {
9935 bool *yes = (bool *) xyes;
9936 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
9937 *yes = true;
9938 }
9939
9940 \f
9941 /* Calculate the stack information for the current function. This is
9942 complicated by having two separate calling sequences, the AIX calling
9943 sequence and the V.4 calling sequence.
9944
9945 AIX (and Darwin/Mac OS X) stack frames look like:
9946 32-bit 64-bit
9947 SP----> +---------------------------------------+
9948 | back chain to caller | 0 0
9949 +---------------------------------------+
9950 | saved CR | 4 8 (8-11)
9951 +---------------------------------------+
9952 | saved LR | 8 16
9953 +---------------------------------------+
9954 | reserved for compilers | 12 24
9955 +---------------------------------------+
9956 | reserved for binders | 16 32
9957 +---------------------------------------+
9958 | saved TOC pointer | 20 40
9959 +---------------------------------------+
9960 | Parameter save area (P) | 24 48
9961 +---------------------------------------+
9962 | Alloca space (A) | 24+P etc.
9963 +---------------------------------------+
9964 | Local variable space (L) | 24+P+A
9965 +---------------------------------------+
9966 | Float/int conversion temporary (X) | 24+P+A+L
9967 +---------------------------------------+
9968 | Save area for AltiVec registers (W) | 24+P+A+L+X
9969 +---------------------------------------+
9970 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9971 +---------------------------------------+
9972 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9973 +---------------------------------------+
		| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
9975 +---------------------------------------+
		| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
9977 +---------------------------------------+
9978 old SP->| back chain to caller's caller |
9979 +---------------------------------------+
9980
9981 The required alignment for AIX configurations is two words (i.e., 8
9982 or 16 bytes).
9983
9984
9985 V.4 stack frames look like:
9986
9987 SP----> +---------------------------------------+
9988 | back chain to caller | 0
9989 +---------------------------------------+
9990 | caller's saved LR | 4
9991 +---------------------------------------+
9992 | Parameter save area (P) | 8
9993 +---------------------------------------+
9994 | Alloca space (A) | 8+P
9995 +---------------------------------------+
9996 | Varargs save area (V) | 8+P+A
9997 +---------------------------------------+
9998 | Local variable space (L) | 8+P+A+V
9999 +---------------------------------------+
10000 | Float/int conversion temporary (X) | 8+P+A+V+L
10001 +---------------------------------------+
10002 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
10003 +---------------------------------------+
10004 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
10005 +---------------------------------------+
10006 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
10007 +---------------------------------------+
10008 | SPE: area for 64-bit GP registers |
10009 +---------------------------------------+
10010 | SPE alignment padding |
10011 +---------------------------------------+
10012 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
10013 +---------------------------------------+
10014 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
10015 +---------------------------------------+
10016 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
10017 +---------------------------------------+
10018 old SP->| back chain to caller's caller |
10019 +---------------------------------------+
10020
10021 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
10022 given. (But note below and in sysv4.h that we require only 8 and
10023 may round up the size of our stack frame anyways. The historical
10024 reason is early versions of powerpc-linux which didn't properly
10025 align the stack at program startup. A happy side-effect is that
10026 -mno-eabi libraries can be used with -meabi programs.)
10027
10028 The EABI configuration defaults to the V.4 layout. However,
10029 the stack alignment requirements may differ. If -mno-eabi is not
10030 given, the required stack alignment is 8 bytes; if -mno-eabi is
10031 given, the required alignment is 16 bytes. (But see V.4 comment
10032 above.) */
10033
10034 #ifndef ABI_STACK_BOUNDARY
10035 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
10036 #endif
10037
10038 rs6000_stack_t *
10039 rs6000_stack_info ()
10040 {
10041 static rs6000_stack_t info, zero_info;
10042 rs6000_stack_t *info_ptr = &info;
10043 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10044 int ehrd_size;
10045 int total_raw_size;
10046
10047 /* Zero all fields portably. */
10048 info = zero_info;
10049
10050 if (TARGET_SPE)
10051 {
10052 /* Cache value so we don't rescan instruction chain over and over. */
10053 if (cfun->machine->insn_chain_scanned_p == 0)
10054 {
10055 cfun->machine->insn_chain_scanned_p = 1;
10056 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
10057 }
10058 }
10059
10060 /* Select which calling sequence. */
10061 info_ptr->abi = DEFAULT_ABI;
10062
10063 /* Calculate which registers need to be saved & save area size. */
10064 info_ptr->first_gp_reg_save = first_reg_to_save ();
10065 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
10066 even if it currently looks like we won't. */
10067 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
10068 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
10069 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
10070 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
10071 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
10072 else
10073 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
10074
10075 /* For the SPE, we have an additional upper 32-bits on each GPR.
10076 Ideally we should save the entire 64-bits only when the upper
10077 half is used in SIMD instructions. Since we only record
10078 registers live (not the size they are used in), this proves
10079 difficult because we'd have to traverse the instruction chain at
10080 the right time, taking reload into account. This is a real pain,
10081 so we opt to save the GPRs in 64-bits always if but one register
10082 gets used in 64-bits. Otherwise, all the registers in the frame
10083 get saved in 32-bits.
10084
10085 So... since when we save all GPRs (except the SP) in 64-bits, the
10086 traditional GP save area will be empty. */
10087 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10088 info_ptr->gp_size = 0;
10089
10090 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
10091 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
10092
10093 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
10094 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
10095 - info_ptr->first_altivec_reg_save);
10096
10097 /* Does this function call anything? */
10098 info_ptr->calls_p = (! current_function_is_leaf
10099 || cfun->machine->ra_needs_full_frame);
10100
10101 /* Determine if we need to save the link register. */
10102 if (rs6000_ra_ever_killed ()
10103 || (DEFAULT_ABI == ABI_AIX
10104 && current_function_profile
10105 && !TARGET_PROFILE_KERNEL)
10106 #ifdef TARGET_RELOCATABLE
10107 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
10108 #endif
10109 || (info_ptr->first_fp_reg_save != 64
10110 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
10111 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
10112 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
10113 || (DEFAULT_ABI == ABI_DARWIN
10114 && flag_pic
10115 && current_function_uses_pic_offset_table)
10116 || info_ptr->calls_p)
10117 {
10118 info_ptr->lr_save_p = 1;
10119 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
10120 }
10121
10122 /* Determine if we need to save the condition code registers. */
10123 if (regs_ever_live[CR2_REGNO]
10124 || regs_ever_live[CR3_REGNO]
10125 || regs_ever_live[CR4_REGNO])
10126 {
10127 info_ptr->cr_save_p = 1;
10128 if (DEFAULT_ABI == ABI_V4)
10129 info_ptr->cr_size = reg_size;
10130 }
10131
10132 /* If the current function calls __builtin_eh_return, then we need
10133 to allocate stack space for registers that will hold data for
10134 the exception handler. */
10135 if (current_function_calls_eh_return)
10136 {
10137 unsigned int i;
10138 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
10139 continue;
10140
10141 /* SPE saves EH registers in 64-bits. */
10142 ehrd_size = i * (TARGET_SPE_ABI
10143 && info_ptr->spe_64bit_regs_used != 0
10144 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
10145 }
10146 else
10147 ehrd_size = 0;
10148
10149 /* Determine various sizes. */
10150 info_ptr->reg_size = reg_size;
10151 info_ptr->fixed_size = RS6000_SAVE_AREA;
10152 info_ptr->varargs_size = RS6000_VARARGS_AREA;
10153 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
10154 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
10155 8);
10156
10157 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10158 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
10159 else
10160 info_ptr->spe_gp_size = 0;
10161
10162 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
10163 {
10164 info_ptr->vrsave_mask = compute_vrsave_mask ();
10165 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
10166 }
10167 else
10168 {
10169 info_ptr->vrsave_mask = 0;
10170 info_ptr->vrsave_size = 0;
10171 }
10172
10173 /* Calculate the offsets. */
10174 switch (DEFAULT_ABI)
10175 {
10176 case ABI_NONE:
10177 default:
10178 abort ();
10179
10180 case ABI_AIX:
10181 case ABI_DARWIN:
10182 info_ptr->fp_save_offset = - info_ptr->fp_size;
10183 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10184
10185 if (TARGET_ALTIVEC_ABI)
10186 {
10187 info_ptr->vrsave_save_offset
10188 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
10189
10190 /* Align stack so vector save area is on a quadword boundary. */
10191 if (info_ptr->altivec_size != 0)
10192 info_ptr->altivec_padding_size
10193 = 16 - (-info_ptr->vrsave_save_offset % 16);
10194 else
10195 info_ptr->altivec_padding_size = 0;
10196
10197 info_ptr->altivec_save_offset
10198 = info_ptr->vrsave_save_offset
10199 - info_ptr->altivec_padding_size
10200 - info_ptr->altivec_size;
10201
10202 /* Adjust for AltiVec case. */
10203 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
10204 }
10205 else
10206 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
10207 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
10208 info_ptr->lr_save_offset = 2*reg_size;
10209 break;
10210
10211 case ABI_V4:
10212 info_ptr->fp_save_offset = - info_ptr->fp_size;
10213 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10214 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
10215
10216 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10217 {
10218 /* Align stack so SPE GPR save area is aligned on a
10219 double-word boundary. */
10220 if (info_ptr->spe_gp_size != 0)
10221 info_ptr->spe_padding_size
10222 = 8 - (-info_ptr->cr_save_offset % 8);
10223 else
10224 info_ptr->spe_padding_size = 0;
10225
10226 info_ptr->spe_gp_save_offset
10227 = info_ptr->cr_save_offset
10228 - info_ptr->spe_padding_size
10229 - info_ptr->spe_gp_size;
10230
10231 /* Adjust for SPE case. */
10232 info_ptr->toc_save_offset
10233 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
10234 }
10235 else if (TARGET_ALTIVEC_ABI)
10236 {
10237 info_ptr->vrsave_save_offset
10238 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
10239
10240 /* Align stack so vector save area is on a quadword boundary. */
10241 if (info_ptr->altivec_size != 0)
10242 info_ptr->altivec_padding_size
10243 = 16 - (-info_ptr->vrsave_save_offset % 16);
10244 else
10245 info_ptr->altivec_padding_size = 0;
10246
10247 info_ptr->altivec_save_offset
10248 = info_ptr->vrsave_save_offset
10249 - info_ptr->altivec_padding_size
10250 - info_ptr->altivec_size;
10251
10252 /* Adjust for AltiVec case. */
10253 info_ptr->toc_save_offset
10254 = info_ptr->altivec_save_offset - info_ptr->toc_size;
10255 }
10256 else
10257 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
10258 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
10259 info_ptr->lr_save_offset = reg_size;
10260 break;
10261 }
10262
10263 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
10264 + info_ptr->gp_size
10265 + info_ptr->altivec_size
10266 + info_ptr->altivec_padding_size
10267 + info_ptr->spe_gp_size
10268 + info_ptr->spe_padding_size
10269 + ehrd_size
10270 + info_ptr->cr_size
10271 + info_ptr->lr_size
10272 + info_ptr->vrsave_size
10273 + info_ptr->toc_size,
10274 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
10275 ? 16 : 8);
10276
10277 total_raw_size = (info_ptr->vars_size
10278 + info_ptr->parm_size
10279 + info_ptr->save_size
10280 + info_ptr->varargs_size
10281 + info_ptr->fixed_size);
10282
10283 info_ptr->total_size =
10284 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
10285
10286 /* Determine if we need to allocate any stack frame:
10287
10288 For AIX we need to push the stack if a frame pointer is needed
10289 (because the stack might be dynamically adjusted), if we are
10290 debugging, if we make calls, or if the sum of fp_save, gp_save,
10291 and local variables are more than the space needed to save all
10292 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10293 + 18*8 = 288 (GPR13 reserved).
10294
10295 For V.4 we don't have the stack cushion that AIX uses, but assume
10296 that the debugger can handle stackless frames. */
10297
10298 if (info_ptr->calls_p)
10299 info_ptr->push_p = 1;
10300
10301 else if (DEFAULT_ABI == ABI_V4)
10302 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
10303
10304 else if (frame_pointer_needed)
10305 info_ptr->push_p = 1;
10306
10307 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
10308 info_ptr->push_p = 1;
10309
10310 else
10311 info_ptr->push_p
10312 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
10313
10314 /* Zero offsets if we're not saving those registers. */
10315 if (info_ptr->fp_size == 0)
10316 info_ptr->fp_save_offset = 0;
10317
10318 if (info_ptr->gp_size == 0)
10319 info_ptr->gp_save_offset = 0;
10320
10321 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
10322 info_ptr->altivec_save_offset = 0;
10323
10324 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
10325 info_ptr->vrsave_save_offset = 0;
10326
10327 if (! TARGET_SPE_ABI
10328 || info_ptr->spe_64bit_regs_used == 0
10329 || info_ptr->spe_gp_size == 0)
10330 info_ptr->spe_gp_save_offset = 0;
10331
10332 if (! info_ptr->lr_save_p)
10333 info_ptr->lr_save_offset = 0;
10334
10335 if (! info_ptr->cr_save_p)
10336 info_ptr->cr_save_offset = 0;
10337
10338 if (! info_ptr->toc_save_p)
10339 info_ptr->toc_save_offset = 0;
10340
10341 return info_ptr;
10342 }
10343
10344 /* Return true if the current function uses any GPRs in 64-bit SIMD
10345 mode. */
10346
10347 static bool
10348 spe_func_has_64bit_regs_p ()
10349 {
10350 rtx insns, insn;
10351
10352 /* Functions that save and restore all the call-saved registers will
10353 need to save/restore the registers in 64-bits. */
10354 if (current_function_calls_eh_return
10355 || current_function_calls_setjmp
10356 || current_function_has_nonlocal_goto)
10357 return true;
10358
10359 insns = get_insns ();
10360
10361 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
10362 {
10363 if (INSN_P (insn))
10364 {
10365 rtx i;
10366
10367 i = PATTERN (insn);
10368 if (GET_CODE (i) == SET
10369 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
10370 return true;
10371 }
10372 }
10373
10374 return false;
10375 }
10376
/* Dump the frame layout INFO to stderr, for use from a debugger.
   If INFO is null, compute and dump the current function's layout.
   Only nonzero (or non-default) fields are printed.  */

void
debug_stack_info (info)
     rs6000_stack_t *info;
{
  const char *abi_string;

  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  switch (info->abi)
    {
    default: abi_string = "Unknown"; break;
    case ABI_NONE: abi_string = "NONE"; break;
    case ABI_AIX: abi_string = "AIX"; break;
    case ABI_DARWIN: abi_string = "Darwin"; break;
    case ABI_V4: abi_string = "V.4"; break;
    }

  fprintf (stderr, "\tABI = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  if (TARGET_SPE_ABI)
    fprintf (stderr, "\tSPE ABI extensions enabled.\n");

  /* 32 / 64 / LAST_ALTIVEC_REGNO+1 are the "nothing saved" sentinels
     from the first_*_reg_to_save routines.  */
  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);

  if (info->toc_save_p)
    fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);

  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->spe_gp_save_offset)
    fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
	     info->spe_gp_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);

  if (info->toc_save_offset)
    fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  if (info->total_size)
    fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);

  if (info->varargs_size)
    fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);

  if (info->spe_gp_size)
    fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->spe_padding_size)
    fprintf (stderr, "\tspe_padding_size = %5d\n",
	     info->spe_padding_size);

  if (info->lr_size)
    fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);

  if (info->toc_size)
    fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size = %5d\n", info->save_size);

  /* reg_size defaults to 4 (32-bit); only note the 64-bit case.  */
  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
10521
10522 rtx
10523 rs6000_return_addr (count, frame)
10524 int count;
10525 rtx frame;
10526 {
10527 /* Currently we don't optimize very well between prolog and body
10528 code and for PIC code the code can be actually quite bad, so
10529 don't try to be too clever here. */
10530 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
10531 {
10532 cfun->machine->ra_needs_full_frame = 1;
10533
10534 return
10535 gen_rtx_MEM
10536 (Pmode,
10537 memory_address
10538 (Pmode,
10539 plus_constant (copy_to_reg
10540 (gen_rtx_MEM (Pmode,
10541 memory_address (Pmode, frame))),
10542 RETURN_ADDRESS_OFFSET)));
10543 }
10544
10545 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
10546 }
10547
10548 /* Say whether a function is a candidate for sibcall handling or not.
10549 We do not allow indirect calls to be optimized into sibling calls.
10550 Also, we can't do it if there are any vector parameters; there's
10551 nowhere to put the VRsave code so it works; note that functions with
10552 vector parameters are required to have a prototype, so the argument
10553 type info must be available here. (The tail recursion case can work
10554 with vector parameters, but there's no way to distinguish here.) */
10555 static bool
10556 rs6000_function_ok_for_sibcall (decl, exp)
10557 tree decl;
10558 tree exp ATTRIBUTE_UNUSED;
10559 {
10560 tree type;
10561 if (decl)
10562 {
10563 if (TARGET_ALTIVEC_VRSAVE)
10564 {
10565 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
10566 type; type = TREE_CHAIN (type))
10567 {
10568 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
10569 return false;
10570 }
10571 }
10572 if (DEFAULT_ABI == ABI_DARWIN
10573 || (*targetm.binds_local_p) (decl))
10574 {
10575 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
10576
10577 if (!lookup_attribute ("longcall", attr_list)
10578 || lookup_attribute ("shortcall", attr_list))
10579 return true;
10580 }
10581 }
10582 return false;
10583 }
10584
/* Return nonzero if something other than the prologue/epilogue or a
   sibcall stores into the link register in the current function, so
   the prologue must save LR.  */
static int
rs6000_ra_ever_killed ()
{
  rtx top;
  rtx reg;
  rtx insn;

  /* Irritatingly, there are two kinds of thunks -- those created with
     TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
     through the regular part of the compiler.  This is a very hacky
     way to tell them apart.  */
  if (current_function_is_thunk && !no_new_pseudos)
    return 0;

  /* regs_ever_live has LR marked as used if any sibcalls are present,
     but this should not force saving and restoring in the
     pro/epilogue.  Likewise, reg_set_between_p thinks a sibcall
     clobbers LR, so that is inappropriate.  */

  /* Also, the prologue can generate a store into LR that
     doesn't really count, like this:

       move LR->R0
       bcl to set PIC register
       move LR->R31
       move R0->LR

     When we're called from the epilogue, we need to avoid counting
     this as a store.  */

  /* Scan the outermost insn sequence; we may be inside a nested
     sequence when this is called.  */
  push_topmost_sequence ();
  top = get_insns ();
  pop_topmost_sequence ();
  reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);

  for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  /* An auto-inc/dec of LR counts as a kill.  */
	  if (FIND_REG_INC_NOTE (insn, reg))
	    return 1;
	  /* Any ordinary (non-sibling) call clobbers LR.  */
	  else if (GET_CODE (insn) == CALL_INSN
		   && !SIBLING_CALL_P (insn))
	    return 1;
	  /* Any other set of LR, unless it is part of the
	     prologue/epilogue sequence described above.  */
	  else if (set_of (reg, insn) != NULL_RTX
		   && !prologue_epilogue_contains (insn))
	    return 1;
    	}
    }
  return 0;
}
10636 \f
10637 /* Add a REG_MAYBE_DEAD note to the insn. */
10638 static void
10639 rs6000_maybe_dead (insn)
10640 rtx insn;
10641 {
10642 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
10643 const0_rtx,
10644 REG_NOTES (insn));
10645 }
10646
/* Emit instructions needed to load the TOC register.
   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
   a constant pool; or for SVR4 -fpic.

   FROMPROLOG is nonzero when called while emitting the prologue, in
   which case no new pseudos may be created (hard regs LR and r0 serve
   as temporaries) and the emitted insns are marked REG_MAYBE_DEAD so
   flow can delete them if the TOC turns out to be unused.  */

void
rs6000_emit_load_toc_table (fromprolog)
     int fromprolog;
{
  rtx dest, insn;
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      /* SVR4 small-model PIC: one load_toc_v4_pic_si insn computes
	 the GOT pointer into TEMP, then copy it to the TOC reg.  */
      rtx temp = (fromprolog
		  ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		  : gen_reg_rtx (Pmode));
      insn = emit_insn (gen_load_toc_v4_pic_si (temp));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_move_insn (dest, temp);
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      /* ELF large-model PIC: compute the TOC base from a
	 PC-relative label pair, adding the label-to-TOC distance
	 (in TEMP0) to the label's address.  */
      char buf[30];
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));
      rtx symF;

      /* possibly create the toc section */
      if (! toc_initialized)
	{
	  toc_section ();
	  function_section (current_function_decl);
	}

      if (fromprolog)
	{
	  /* Use the per-function LCF/LCL label pair; all three insns
	     may be deleted if the TOC is never referenced.  */
	  rtx symL;

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
							       symF)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
							       symL,
							       symF)));
	}
      else
	{
	  /* Not in the prologue: generate a fresh LCG label and load
	     the label-to-TOC offset from memory at the label.  */
	  rtx tocsym;
	  static int reload_toc_labelno = 0;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
	  emit_move_insn (dest, tempLR);
	  emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
	}
      /* TOC base = label address + (TOC - label) distance.  */
      insn = emit_insn (gen_addsi3 (dest, temp0, dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  Load the TOC
	 anchor address with a lis/addi (elf_high/elf_low) pair.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      insn = emit_insn (gen_elf_high (dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_insn (gen_elf_low (dest, dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (DEFAULT_ABI == ABI_AIX)
    {
      /* AIX: reload the TOC pointer from its reserved stack slot.  */
      if (TARGET_32BIT)
	insn = emit_insn (gen_load_toc_aix_si (dest));
      else
	insn = emit_insn (gen_load_toc_aix_di (dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else
    abort ();
}
10750
/* Return the alias set used for TOC references, allocating it on the
   first call and caching it thereafter.  */
int
get_TOC_alias_set ()
{
  static int toc_alias_set = -1;

  if (toc_alias_set == -1)
    toc_alias_set = new_alias_set ();

  return toc_alias_set;
}
10759
10760 /* This returns nonzero if the current function uses the TOC. This is
10761 determined by the presence of (unspec ... UNSPEC_TOC) or
10762 use (unspec ... UNSPEC_TOC), which are generated by the various
10763 load_toc_* patterns. */
10764
10765 int
10766 uses_TOC ()
10767 {
10768 rtx insn;
10769
10770 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10771 if (INSN_P (insn))
10772 {
10773 rtx pat = PATTERN (insn);
10774 int i;
10775
10776 if (GET_CODE (pat) == PARALLEL)
10777 for (i = 0; i < XVECLEN (pat, 0); i++)
10778 {
10779 rtx sub = XVECEXP (pat, 0, i);
10780 if (GET_CODE (sub) == USE)
10781 {
10782 sub = XEXP (sub, 0);
10783 if (GET_CODE (sub) == UNSPEC
10784 && XINT (sub, 1) == UNSPEC_TOC)
10785 return 1;
10786 }
10787 }
10788 }
10789 return 0;
10790 }
10791
10792 rtx
10793 create_TOC_reference (symbol)
10794 rtx symbol;
10795 {
10796 return gen_rtx_PLUS (Pmode,
10797 gen_rtx_REG (Pmode, TOC_REGISTER),
10798 gen_rtx_CONST (Pmode,
10799 gen_rtx_MINUS (Pmode, symbol,
10800 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
10801 }
10802
/* __throw will restore its own return address to be the same as the
   return address of the function that the throw is being made to.
   This is unfortunate, because we want to check the original
   return address to see if we need to restore the TOC.
   So we have to squirrel it away here.
   This is used only in compiling __throw and __rethrow.

   Most of this code should be removed by CSE.  */
static rtx insn_after_throw;

/* This does the saving...

   Capture into INSN_AFTER_THROW the instruction word found at our
   caller's return address, for rs6000_emit_eh_toc_restore to compare
   against the TOC-restore opcode.  */
void
rs6000_aix_emit_builtin_unwind_init ()
{
  rtx mem;
  rtx stack_top = gen_reg_rtx (Pmode);
  rtx opcode_addr = gen_reg_rtx (Pmode);

  insn_after_throw = gen_reg_rtx (SImode);

  /* Follow the back chain to the caller's frame.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  emit_move_insn (stack_top, mem);

  /* The word two pointers into that frame holds the saved return
     address (NOTE(review): matches the AIX LR save slot -- confirm
     against the ABI frame layout).  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, stack_top,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  /* Fetch the 32-bit instruction located at that return address.  */
  emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
}
10832
/* Emit insns to _restore_ the TOC register, at runtime (specifically
   in _eh.o).  Only used on AIX.

   The idea is that on AIX, function calls look like this:
	bl  somefunction-trampoline
	lwz r2,20(sp)

   and later,
	somefunction-trampoline:
	stw r2,20(sp)
	 ... load function address in the count register ...
	bctr
   or like this, if the linker determines that this is not a cross-module call
   and so the TOC need not be restored:
	bl  somefunction
	nop
   or like this, if the compiler could determine that this is not a
   cross-module call:
	bl  somefunction
   now, the tricky bit here is that register 2 is saved and restored
   by the _linker_, so we can't readily generate debugging information
   for it.  So we need to go back up the call chain looking at the
   insns at return addresses to see which calls saved the TOC register
   and so see where it gets restored from.

   Oh, and all this gets done in RTL inside the eh_epilogue pattern,
   just before the actual epilogue.

   On the bright side, this incurs no space or time overhead unless an
   exception is thrown, except for the extra code in libgcc.a.

   The parameter STACKSIZE is a register containing (at runtime)
   the amount to be popped off the stack in addition to the stack frame
   of this routine (which will be __throw or __rethrow, and so is
   guaranteed to have a stack frame).  */

void
rs6000_emit_eh_toc_restore (stacksize)
     rtx stacksize;
{
  rtx top_of_stack;
  rtx bottom_of_stack = gen_reg_rtx (Pmode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx mem;
  rtx loop_start = gen_label_rtx ();
  rtx no_toc_restore_needed = gen_label_rtx ();
  rtx loop_exit = gen_label_rtx ();

  /* The walk starts at the caller's frame (via the back chain)...  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* ... and ends STACKSIZE bytes further up the stack.  */
  top_of_stack = expand_binop (Pmode, add_optab,
			       bottom_of_stack, stacksize,
			       NULL_RTX, 1, OPTAB_WIDEN);

  /* The encoding of the TOC-restore insn shown above: "lwz r2,20(sp)"
     on 32-bit, the 64-bit analogue otherwise.  */
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
					   : 0xE8410028, SImode));

  /* The first opcode to examine was squirrelled away by
     rs6000_aix_emit_builtin_unwind_init.  */
  if (insn_after_throw == NULL_RTX)
    abort ();
  emit_move_insn (opcode, insn_after_throw);

  emit_note (NOTE_INSN_LOOP_BEG);
  emit_label (loop_start);

  /* If the insn at this frame's return address is not the TOC
     restore, the call did not save r2 -- skip the reload.  */
  do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_restore_needed);

  /* Reload r2 from this frame's TOC save slot.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (gen_rtx_REG (Pmode, 2), mem);

  emit_label (no_toc_restore_needed);
  /* Stop once the walk has covered the whole region.  */
  do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
			   Pmode, NULL_RTX, NULL_RTX,
			   loop_exit);

  /* Step to the next outer frame through its back chain...  */
  mem = gen_rtx_MEM (Pmode, bottom_of_stack);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* ... and fetch the opcode at that frame's return address.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));

  emit_note (NOTE_INSN_LOOP_CONT);
  emit_jump (loop_start);
  emit_note (NOTE_INSN_LOOP_END);
  emit_label (loop_exit);
}
10930 \f
10931 /* This ties together stack memory (MEM with an alias set of
10932 rs6000_sr_alias_set) and the change to the stack pointer. */
10933
10934 static void
10935 rs6000_emit_stack_tie ()
10936 {
10937 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
10938
10939 set_mem_alias_set (mem, rs6000_sr_alias_set);
10940 emit_insn (gen_stack_tie (mem));
10941 }
10942
/* Emit the correct code for allocating stack space, as insns.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.  */

static void
rs6000_emit_allocate_stack (size, copy_r12)
     HOST_WIDE_INT size;
     int copy_r12;
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  /* Stack-limit checking: trap before the allocation would push the
     stack pointer below the configured limit.  */
  if (current_function_limit_stack)
    {
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  /* Limit in a GPR: compute limit + size in r0, trap if the
	     current stack pointer is below that.  */
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  /* Limit is a symbol: materialize symbol + size with an
	     elf_high/elf_low pair, then trap as above.  */
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Keep the old stack pointer in r12: always needed without update
     forms (to store the back chain below), and on request otherwise.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      if (size > 32767)
	{
	  /* The decrement won't fit in a 16-bit immediate; load it
	     into r0 and split the move now.  */
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      /* Store-with-update: decrement sp and store the back chain in
	 a single insn.  */
      insn = emit_insn (TARGET_32BIT
			? gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg)
			: gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* No update forms: decrement sp, then store the old sp (saved
	 in r12 above) as the back chain.  */
      insn = emit_insn (TARGET_32BIT
			? gen_addsi3 (stack_reg, stack_reg, todec)
			: gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Record the net sp = sp - size effect for the unwinder.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
11030
11031 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
11032 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
11033 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
11034 deduce these equivalences by itself so it wasn't necessary to hold
11035 its hand so much. */
11036
11037 static void
11038 rs6000_frame_related (insn, reg, val, reg2, rreg)
11039 rtx insn;
11040 rtx reg;
11041 HOST_WIDE_INT val;
11042 rtx reg2;
11043 rtx rreg;
11044 {
11045 rtx real, temp;
11046
11047 /* copy_rtx will not make unique copies of registers, so we need to
11048 ensure we don't have unwanted sharing here. */
11049 if (reg == reg2)
11050 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11051
11052 if (reg == rreg)
11053 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11054
11055 real = copy_rtx (PATTERN (insn));
11056
11057 if (reg2 != NULL_RTX)
11058 real = replace_rtx (real, reg2, rreg);
11059
11060 real = replace_rtx (real, reg,
11061 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
11062 STACK_POINTER_REGNUM),
11063 GEN_INT (val)));
11064
11065 /* We expect that 'real' is either a SET or a PARALLEL containing
11066 SETs (and possibly other stuff). In a PARALLEL, all the SETs
11067 are important so they all have to be marked RTX_FRAME_RELATED_P. */
11068
11069 if (GET_CODE (real) == SET)
11070 {
11071 rtx set = real;
11072
11073 temp = simplify_rtx (SET_SRC (set));
11074 if (temp)
11075 SET_SRC (set) = temp;
11076 temp = simplify_rtx (SET_DEST (set));
11077 if (temp)
11078 SET_DEST (set) = temp;
11079 if (GET_CODE (SET_DEST (set)) == MEM)
11080 {
11081 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11082 if (temp)
11083 XEXP (SET_DEST (set), 0) = temp;
11084 }
11085 }
11086 else if (GET_CODE (real) == PARALLEL)
11087 {
11088 int i;
11089 for (i = 0; i < XVECLEN (real, 0); i++)
11090 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
11091 {
11092 rtx set = XVECEXP (real, 0, i);
11093
11094 temp = simplify_rtx (SET_SRC (set));
11095 if (temp)
11096 SET_SRC (set) = temp;
11097 temp = simplify_rtx (SET_DEST (set));
11098 if (temp)
11099 SET_DEST (set) = temp;
11100 if (GET_CODE (SET_DEST (set)) == MEM)
11101 {
11102 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11103 if (temp)
11104 XEXP (SET_DEST (set), 0) = temp;
11105 }
11106 RTX_FRAME_RELATED_P (set) = 1;
11107 }
11108 }
11109 else
11110 abort ();
11111
11112 if (TARGET_SPE)
11113 real = spe_synthesize_frame_save (real);
11114
11115 RTX_FRAME_RELATED_P (insn) = 1;
11116 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11117 real,
11118 REG_NOTES (insn));
11119 }
11120
/* Given an SPE frame note, return a PARALLEL of SETs with the
   original note, plus a synthetic register save.  */

static rtx
spe_synthesize_frame_save (real)
     rtx real;
{
  rtx synth, offset, reg, real2;

  /* Only 64-bit (V2SImode) register saves need this treatment.  */
  if (GET_CODE (real) != SET
      || GET_MODE (SET_SRC (real)) != V2SImode)
    return real;

  /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
     frame related note.  The parallel contains a set of the register
     being saved, and another set to a synthetic register (n+1200).
     This is so we can differentiate between 64-bit and 32-bit saves.
     Words cannot describe this nastiness.  */

  if (GET_CODE (SET_DEST (real)) != MEM
      || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
      || GET_CODE (SET_SRC (real)) != REG)
    abort ();

  /* Transform:
       (set (mem (plus (reg x) (const y)))
	    (reg z))
     into:
       (set (mem (plus (reg x) (const y+4)))
	    (reg z+1200))
  */

  /* REAL2 becomes a save of the ordinary 32-bit GPR in SImode.  */
  real2 = copy_rtx (real);
  PUT_MODE (SET_DEST (real2), SImode);
  reg = SET_SRC (real2);
  real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
  synth = copy_rtx (real2);

  /* On big-endian, the GPR half lives 4 bytes into the slot.  */
  if (BYTES_BIG_ENDIAN)
    {
      offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
      real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
    }

  /* SYNTH becomes a save of the upper half, denoted by the synthetic
     register number n+1200, at the other 4-byte half of the slot.  */
  reg = SET_SRC (synth);

  synth = replace_rtx (synth, reg,
		       gen_rtx_REG (SImode, REGNO (reg) + 1200));

  offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
  synth = replace_rtx (synth, offset,
		       GEN_INT (INTVAL (offset)
				+ (BYTES_BIG_ENDIAN ? 0 : 4)));

  RTX_FRAME_RELATED_P (synth) = 1;
  RTX_FRAME_RELATED_P (real2) = 1;
  /* Order the two sets by ascending slot offset.  */
  if (BYTES_BIG_ENDIAN)
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
  else
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));

  return real;
}
11184
11185 /* Returns an insn that has a vrsave set operation with the
11186 appropriate CLOBBERs. */
11187
11188 static rtx
11189 generate_set_vrsave (reg, info, epiloguep)
11190 rtx reg;
11191 rs6000_stack_t *info;
11192 int epiloguep;
11193 {
11194 int nclobs, i;
11195 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
11196 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11197
11198 clobs[0]
11199 = gen_rtx_SET (VOIDmode,
11200 vrsave,
11201 gen_rtx_UNSPEC_VOLATILE (SImode,
11202 gen_rtvec (2, reg, vrsave),
11203 30));
11204
11205 nclobs = 1;
11206
11207 /* We need to clobber the registers in the mask so the scheduler
11208 does not move sets to VRSAVE before sets of AltiVec registers.
11209
11210 However, if the function receives nonlocal gotos, reload will set
11211 all call saved registers live. We will end up with:
11212
11213 (set (reg 999) (mem))
11214 (parallel [ (set (reg vrsave) (unspec blah))
11215 (clobber (reg 999))])
11216
11217 The clobber will cause the store into reg 999 to be dead, and
11218 flow will attempt to delete an epilogue insn. In this case, we
11219 need an unspec use/set of the register. */
11220
11221 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11222 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
11223 {
11224 if (!epiloguep || call_used_regs [i])
11225 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
11226 gen_rtx_REG (V4SImode, i));
11227 else
11228 {
11229 rtx reg = gen_rtx_REG (V4SImode, i);
11230
11231 clobs[nclobs++]
11232 = gen_rtx_SET (VOIDmode,
11233 reg,
11234 gen_rtx_UNSPEC (V4SImode,
11235 gen_rtvec (1, reg), 27));
11236 }
11237 }
11238
11239 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
11240
11241 for (i = 0; i < nclobs; ++i)
11242 XVECEXP (insn, 0, i) = clobs[i];
11243
11244 return insn;
11245 }
11246
11247 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11248 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
11249
11250 static void
11251 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
11252 rtx frame_reg;
11253 rtx frame_ptr;
11254 enum machine_mode mode;
11255 unsigned int regno;
11256 int offset;
11257 int total_size;
11258 {
11259 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
11260 rtx replacea, replaceb;
11261
11262 int_rtx = GEN_INT (offset);
11263
11264 /* Some cases that need register indexed addressing. */
11265 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
11266 || (TARGET_SPE_ABI
11267 && SPE_VECTOR_MODE (mode)
11268 && !SPE_CONST_OFFSET_OK (offset)))
11269 {
11270 /* Whomever calls us must make sure r11 is available in the
11271 flow path of instructions in the prologue. */
11272 offset_rtx = gen_rtx_REG (Pmode, 11);
11273 emit_move_insn (offset_rtx, int_rtx);
11274
11275 replacea = offset_rtx;
11276 replaceb = int_rtx;
11277 }
11278 else
11279 {
11280 offset_rtx = int_rtx;
11281 replacea = NULL_RTX;
11282 replaceb = NULL_RTX;
11283 }
11284
11285 reg = gen_rtx_REG (mode, regno);
11286 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
11287 mem = gen_rtx_MEM (mode, addr);
11288 set_mem_alias_set (mem, rs6000_sr_alias_set);
11289
11290 insn = emit_move_insn (mem, reg);
11291
11292 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
11293 }
11294
11295 /* Emit an offset memory reference suitable for a frame store, while
11296 converting to a valid addressing mode. */
11297
11298 static rtx
11299 gen_frame_mem_offset (mode, reg, offset)
11300 enum machine_mode mode;
11301 rtx reg;
11302 int offset;
11303 {
11304 rtx int_rtx, offset_rtx;
11305
11306 int_rtx = GEN_INT (offset);
11307
11308 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
11309 {
11310 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11311 emit_move_insn (offset_rtx, int_rtx);
11312 }
11313 else
11314 offset_rtx = int_rtx;
11315
11316 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
11317 }
11318
11319 /* Emit function prologue as insns. */
11320
11321 void
11322 rs6000_emit_prologue ()
11323 {
11324 rs6000_stack_t *info = rs6000_stack_info ();
11325 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
11326 int reg_size = TARGET_POWERPC64 ? 8 : 4;
11327 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11328 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
11329 rtx frame_reg_rtx = sp_reg_rtx;
11330 rtx cr_save_rtx = NULL;
11331 rtx insn;
11332 int saving_FPRs_inline;
11333 int using_store_multiple;
11334 HOST_WIDE_INT sp_offset = 0;
11335
11336 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11337 {
11338 reg_mode = V2SImode;
11339 reg_size = 8;
11340 }
11341
11342 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11343 && (!TARGET_SPE_ABI
11344 || info->spe_64bit_regs_used == 0)
11345 && info->first_gp_reg_save < 31);
11346 saving_FPRs_inline = (info->first_fp_reg_save == 64
11347 || FP_SAVE_INLINE (info->first_fp_reg_save));
11348
11349 /* For V.4, update stack before we do any saving and set back pointer. */
11350 if (info->push_p && DEFAULT_ABI == ABI_V4)
11351 {
11352 if (info->total_size < 32767)
11353 sp_offset = info->total_size;
11354 else
11355 frame_reg_rtx = frame_ptr_rtx;
11356 rs6000_emit_allocate_stack (info->total_size,
11357 (frame_reg_rtx != sp_reg_rtx
11358 && (info->cr_save_p
11359 || info->lr_save_p
11360 || info->first_fp_reg_save < 64
11361 || info->first_gp_reg_save < 32
11362 )));
11363 if (frame_reg_rtx != sp_reg_rtx)
11364 rs6000_emit_stack_tie ();
11365 }
11366
11367 /* Save AltiVec registers if needed. */
11368 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11369 {
11370 int i;
11371
11372 /* There should be a non inline version of this, for when we
11373 are saving lots of vector registers. */
11374 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11375 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11376 {
11377 rtx areg, savereg, mem;
11378 int offset;
11379
11380 offset = info->altivec_save_offset + sp_offset
11381 + 16 * (i - info->first_altivec_reg_save);
11382
11383 savereg = gen_rtx_REG (V4SImode, i);
11384
11385 areg = gen_rtx_REG (Pmode, 0);
11386 emit_move_insn (areg, GEN_INT (offset));
11387
11388 /* AltiVec addressing mode is [reg+reg]. */
11389 mem = gen_rtx_MEM (V4SImode,
11390 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
11391
11392 set_mem_alias_set (mem, rs6000_sr_alias_set);
11393
11394 insn = emit_move_insn (mem, savereg);
11395
11396 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11397 areg, GEN_INT (offset));
11398 }
11399 }
11400
11401 /* VRSAVE is a bit vector representing which AltiVec registers
11402 are used. The OS uses this to determine which vector
11403 registers to save on a context switch. We need to save
11404 VRSAVE on the stack frame, add whatever AltiVec registers we
11405 used in this function, and do the corresponding magic in the
11406 epilogue. */
11407
11408 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
11409 {
11410 rtx reg, mem, vrsave;
11411 int offset;
11412
11413 /* Get VRSAVE onto a GPR. */
11414 reg = gen_rtx_REG (SImode, 12);
11415 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11416 if (TARGET_MACHO)
11417 emit_insn (gen_get_vrsave_internal (reg));
11418 else
11419 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
11420
11421 /* Save VRSAVE. */
11422 offset = info->vrsave_save_offset + sp_offset;
11423 mem
11424 = gen_rtx_MEM (SImode,
11425 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
11426 set_mem_alias_set (mem, rs6000_sr_alias_set);
11427 insn = emit_move_insn (mem, reg);
11428
11429 /* Include the registers in the mask. */
11430 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
11431
11432 insn = emit_insn (generate_set_vrsave (reg, info, 0));
11433 }
11434
11435 /* If we use the link register, get it into r0. */
11436 if (info->lr_save_p)
11437 emit_move_insn (gen_rtx_REG (Pmode, 0),
11438 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11439
11440 /* If we need to save CR, put it into r12. */
11441 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
11442 {
11443 cr_save_rtx = gen_rtx_REG (SImode, 12);
11444 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11445 }
11446
11447 /* Do any required saving of fpr's. If only one or two to save, do
11448 it ourselves. Otherwise, call function. */
11449 if (saving_FPRs_inline)
11450 {
11451 int i;
11452 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11453 if ((regs_ever_live[info->first_fp_reg_save+i]
11454 && ! call_used_regs[info->first_fp_reg_save+i]))
11455 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
11456 info->first_fp_reg_save + i,
11457 info->fp_save_offset + sp_offset + 8 * i,
11458 info->total_size);
11459 }
11460 else if (info->first_fp_reg_save != 64)
11461 {
11462 int i;
11463 char rname[30];
11464 const char *alloc_rname;
11465 rtvec p;
11466 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
11467
11468 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
11469 gen_rtx_REG (Pmode,
11470 LINK_REGISTER_REGNUM));
11471 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
11472 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
11473 alloc_rname = ggc_strdup (rname);
11474 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11475 gen_rtx_SYMBOL_REF (Pmode,
11476 alloc_rname));
11477 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11478 {
11479 rtx addr, reg, mem;
11480 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
11481 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11482 GEN_INT (info->fp_save_offset
11483 + sp_offset + 8*i));
11484 mem = gen_rtx_MEM (DFmode, addr);
11485 set_mem_alias_set (mem, rs6000_sr_alias_set);
11486
11487 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
11488 }
11489 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11490 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11491 NULL_RTX, NULL_RTX);
11492 }
11493
11494 /* Save GPRs. This is done as a PARALLEL if we are using
11495 the store-multiple instructions. */
11496 if (using_store_multiple)
11497 {
11498 rtvec p;
11499 int i;
11500 p = rtvec_alloc (32 - info->first_gp_reg_save);
11501 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11502 {
11503 rtx addr, reg, mem;
11504 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11505 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11506 GEN_INT (info->gp_save_offset
11507 + sp_offset
11508 + reg_size * i));
11509 mem = gen_rtx_MEM (reg_mode, addr);
11510 set_mem_alias_set (mem, rs6000_sr_alias_set);
11511
11512 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
11513 }
11514 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11515 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11516 NULL_RTX, NULL_RTX);
11517 }
11518 else
11519 {
11520 int i;
11521 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11522 if ((regs_ever_live[info->first_gp_reg_save+i]
11523 && ! call_used_regs[info->first_gp_reg_save+i])
11524 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11525 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11526 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11527 {
11528 rtx addr, reg, mem;
11529 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11530
11531 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11532 {
11533 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11534 rtx b;
11535
11536 if (!SPE_CONST_OFFSET_OK (offset))
11537 {
11538 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11539 emit_move_insn (b, GEN_INT (offset));
11540 }
11541 else
11542 b = GEN_INT (offset);
11543
11544 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11545 mem = gen_rtx_MEM (V2SImode, addr);
11546 set_mem_alias_set (mem, rs6000_sr_alias_set);
11547 insn = emit_move_insn (mem, reg);
11548
11549 if (GET_CODE (b) == CONST_INT)
11550 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11551 NULL_RTX, NULL_RTX);
11552 else
11553 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11554 b, GEN_INT (offset));
11555 }
11556 else
11557 {
11558 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11559 GEN_INT (info->gp_save_offset
11560 + sp_offset
11561 + reg_size * i));
11562 mem = gen_rtx_MEM (reg_mode, addr);
11563 set_mem_alias_set (mem, rs6000_sr_alias_set);
11564
11565 insn = emit_move_insn (mem, reg);
11566 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11567 NULL_RTX, NULL_RTX);
11568 }
11569 }
11570 }
11571
11572 /* ??? There's no need to emit actual instructions here, but it's the
11573 easiest way to get the frame unwind information emitted. */
11574 if (current_function_calls_eh_return)
11575 {
11576 unsigned int i, regno;
11577
11578 for (i = 0; ; ++i)
11579 {
11580 regno = EH_RETURN_DATA_REGNO (i);
11581 if (regno == INVALID_REGNUM)
11582 break;
11583
11584 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
11585 info->ehrd_offset + sp_offset
11586 + reg_size * (int) i,
11587 info->total_size);
11588 }
11589 }
11590
11591 /* Save lr if we used it. */
11592 if (info->lr_save_p)
11593 {
11594 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11595 GEN_INT (info->lr_save_offset + sp_offset));
11596 rtx reg = gen_rtx_REG (Pmode, 0);
11597 rtx mem = gen_rtx_MEM (Pmode, addr);
11598 /* This should not be of rs6000_sr_alias_set, because of
11599 __builtin_return_address. */
11600
11601 insn = emit_move_insn (mem, reg);
11602 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11603 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11604 }
11605
11606 /* Save CR if we use any that must be preserved. */
11607 if (info->cr_save_p)
11608 {
11609 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11610 GEN_INT (info->cr_save_offset + sp_offset));
11611 rtx mem = gen_rtx_MEM (SImode, addr);
11612
11613 set_mem_alias_set (mem, rs6000_sr_alias_set);
11614
11615 /* If r12 was used to hold the original sp, copy cr into r0 now
11616 that it's free. */
11617 if (REGNO (frame_reg_rtx) == 12)
11618 {
11619 cr_save_rtx = gen_rtx_REG (SImode, 0);
11620 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11621 }
11622 insn = emit_move_insn (mem, cr_save_rtx);
11623
11624 /* Now, there's no way that dwarf2out_frame_debug_expr is going
11625 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
11626 But that's OK. All we have to do is specify that _one_ condition
11627 code register is saved in this stack slot. The thrower's epilogue
11628 will then restore all the call-saved registers.
11629 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
11630 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11631 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
11632 }
11633
11634 /* Update stack and set back pointer unless this is V.4,
11635 for which it was done previously. */
11636 if (info->push_p && DEFAULT_ABI != ABI_V4)
11637 rs6000_emit_allocate_stack (info->total_size, FALSE);
11638
11639 /* Set frame pointer, if needed. */
11640 if (frame_pointer_needed)
11641 {
11642 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
11643 sp_reg_rtx);
11644 RTX_FRAME_RELATED_P (insn) = 1;
11645 }
11646
11647 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
11648 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
11649 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
11650 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
11651 {
11652 /* If emit_load_toc_table will use the link register, we need to save
11653 it. We use R12 for this purpose because emit_load_toc_table
11654 can use register 0. This allows us to use a plain 'blr' to return
11655 from the procedure more often. */
11656 int save_LR_around_toc_setup = (TARGET_ELF
11657 && DEFAULT_ABI != ABI_AIX
11658 && flag_pic
11659 && ! info->lr_save_p
11660 && EXIT_BLOCK_PTR->pred != NULL);
11661 if (save_LR_around_toc_setup)
11662 {
11663 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11664 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
11665 rs6000_emit_load_toc_table (TRUE);
11666 rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
11667 }
11668 else
11669 rs6000_emit_load_toc_table (TRUE);
11670 }
11671
11672 #if TARGET_MACHO
11673 if (DEFAULT_ABI == ABI_DARWIN
11674 && flag_pic && current_function_uses_pic_offset_table)
11675 {
11676 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11677 const char *picbase = machopic_function_base_name ();
11678 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
11679
11680 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
11681
11682 rs6000_maybe_dead (
11683 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
11684 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
11685 }
11686 #endif
11687 }
11688
11689 /* Write function prologue. */
11690
11691 static void
11692 rs6000_output_function_prologue (file, size)
11693 FILE *file;
11694 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11695 {
11696 rs6000_stack_t *info = rs6000_stack_info ();
11697
11698 if (TARGET_DEBUG_STACK)
11699 debug_stack_info (info);
11700
11701 /* Write .extern for any function we will call to save and restore
11702 fp values. */
11703 if (info->first_fp_reg_save < 64
11704 && !FP_SAVE_INLINE (info->first_fp_reg_save))
11705 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
11706 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
11707 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
11708 RESTORE_FP_SUFFIX);
11709
11710 /* Write .extern for AIX common mode routines, if needed. */
11711 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
11712 {
11713 fputs ("\t.extern __mulh\n", file);
11714 fputs ("\t.extern __mull\n", file);
11715 fputs ("\t.extern __divss\n", file);
11716 fputs ("\t.extern __divus\n", file);
11717 fputs ("\t.extern __quoss\n", file);
11718 fputs ("\t.extern __quous\n", file);
11719 common_mode_defined = 1;
11720 }
11721
11722 if (! HAVE_prologue)
11723 {
11724 start_sequence ();
11725
11726 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
11727 the "toplevel" insn chain. */
11728 emit_note (NOTE_INSN_DELETED);
11729 rs6000_emit_prologue ();
11730 emit_note (NOTE_INSN_DELETED);
11731
11732 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11733 {
11734 rtx insn;
11735 unsigned addr = 0;
11736 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11737 {
11738 INSN_ADDRESSES_NEW (insn, addr);
11739 addr += 4;
11740 }
11741 }
11742
11743 if (TARGET_DEBUG_STACK)
11744 debug_rtx_list (get_insns (), 100);
11745 final (get_insns (), file, FALSE, FALSE);
11746 end_sequence ();
11747 }
11748
11749 rs6000_pic_labelno++;
11750 }
11751
11752 /* Emit function epilogue as insns.
11753
11754 At present, dwarf2out_frame_debug_expr doesn't understand
11755 register restores, so we don't bother setting RTX_FRAME_RELATED_P
11756 anywhere in the epilogue. Most of the insns below would in any case
11757 need special notes to explain where r11 is in relation to the stack. */
11758
void
rs6000_emit_epilogue (sibcall)
     int sibcall;	/* Nonzero when emitting the epilogue before a
			   sibling call: the final return is suppressed
			   and FPRs are always restored inline.  */
{
  rs6000_stack_t *info;
  int restoring_FPRs_inline;	/* Restore FPRs with loads rather than
				   branching to _restfpr-style helpers.  */
  int using_load_multiple;	/* Use a single load-multiple for GPRs.  */
  int using_mfcr_multiple;	/* Split the CR restore into per-field
				   mtcrf insns (faster on some CPUs).  */
  int use_backchain_to_restore_sp;
  /* Byte offset to add to frame_reg_rtx addresses while the stack
     pointer has not yet been popped (V.4 pops after the restores).  */
  int sp_offset = 0;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
  rtx frame_reg_rtx = sp_reg_rtx;
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  int i;

  info = rs6000_stack_info ();

  /* Under the SPE ABI with 64-bit GPR contents in use, registers are
     restored as 64-bit V2SImode quantities.  */
  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			 && (!TARGET_SPE_ABI
			     || info->spe_64bit_regs_used == 0)
			 && info->first_gp_reg_save < 31);
  restoring_FPRs_inline = (sibcall
			   || current_function_calls_eh_return
			   || info->first_fp_reg_save == 64
			   || FP_SAVE_INLINE (info->first_fp_reg_save));
  use_backchain_to_restore_sp = (frame_pointer_needed
				 || current_function_calls_alloca
				 || info->total_size > 32767);
  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
			 || rs6000_cpu == PROCESSOR_PPC603
			 || rs6000_cpu == PROCESSOR_PPC750
			 || optimize_size);

  /* If we have a frame pointer, a call to alloca, or a large stack
     frame, restore the old stack pointer using the backchain.  Otherwise,
     we know what size to update it with.  */
  if (use_backchain_to_restore_sp)
    {
      /* Under V.4, don't reset the stack pointer until after we're done
	 loading the saved registers.  */
      if (DEFAULT_ABI == ABI_V4)
	frame_reg_rtx = gen_rtx_REG (Pmode, 11);

      /* Load the saved SP (the backchain word) from the top of frame.  */
      emit_move_insn (frame_reg_rtx,
		      gen_rtx_MEM (Pmode, sp_reg_rtx));

    }
  else if (info->push_p)
    {
      if (DEFAULT_ABI == ABI_V4)
	sp_offset = info->total_size;
      else
	{
	  /* Pop the frame now; restores below address off the new SP.  */
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size)));
	}
    }

  /* Restore AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx addr, areg, mem;

	    /* r0 holds the offset; AltiVec loads need a register index.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn
	      (areg, GEN_INT (info->altivec_save_offset
			      + sp_offset
			      + 16 * (i - info->first_altivec_reg_save)));

	    /* AltiVec addressing mode is [reg+reg].  */
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
	    mem = gen_rtx_MEM (V4SImode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
	  }
    }

  /* Restore VRSAVE if needed.  r12 is a scratch here; it is reloaded
     again below if CR was saved.  */
  if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
    {
      rtx addr, mem, reg;

      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			   GEN_INT (info->vrsave_save_offset + sp_offset));
      mem = gen_rtx_MEM (SImode, addr);
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      reg = gen_rtx_REG (SImode, 12);
      emit_move_insn (reg, mem);

      emit_insn (generate_set_vrsave (reg, info, 1));
    }

  /* Get the old lr if we saved it.  Loaded into r0 first so the load
     can be scheduled ahead of the mtlr below.  */
  if (info->lr_save_p)
    {
      rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
				      info->lr_save_offset + sp_offset);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
    }

  /* Get the old cr if we saved it.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
    }

  /* Set LR here to try to overlap restores below.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		    gen_rtx_REG (Pmode, 0));

  /* Load exception handler data registers, if needed.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  rtx mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
				      info->ehrd_offset + sp_offset
				      + reg_size * (int) i);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
	}
    }

  /* Restore GPRs.  This is done as a PARALLEL if we are using
     the load-multiple instructions.  */
  if (using_load_multiple)
    {
      rtvec p;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) =
	    gen_rtx_SET (VOIDmode,
			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
			 mem);
	}
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
  else
    /* Otherwise restore individually: only call-saved GPRs that were
       actually used, plus the PIC offset table register when the ABI
       requires it to have been saved.  Must mirror the save test in
       the prologue.  */
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
      if ((regs_ever_live[info->first_gp_reg_save+i]
	   && ! call_used_regs[info->first_gp_reg_save+i])
	  || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
	      && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  /* Restore 64-bit quantities for SPE.  */
	  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
	    {
	      int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
	      rtx b;

	      /* SPE memory ops only accept small immediate offsets;
		 otherwise materialize the offset in a scratch reg.  */
	      if (!SPE_CONST_OFFSET_OK (offset))
		{
		  b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		  emit_move_insn (b, GEN_INT (offset));
		}
	      else
		b = GEN_INT (offset);

	      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
	      mem = gen_rtx_MEM (V2SImode, addr);
	    }

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode,
				       info->first_gp_reg_save + i), mem);
	}

  /* Restore fpr's if we need to do it without calling a function.  */
  if (restoring_FPRs_inline)
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
      if ((regs_ever_live[info->first_fp_reg_save+i]
	   && ! call_used_regs[info->first_fp_reg_save+i]))
	{
	  rtx addr, mem;
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset
					+ 8 * i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (DFmode,
				       info->first_fp_reg_save + i),
			  mem);
	}

  /* If we saved cr, restore it here.  Just those that were used.
     The saved CR image was loaded into r12 above.  */
  if (info->cr_save_p)
    {
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
      int count = 0;

      if (using_mfcr_multiple)
	{
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      count++;
	  /* cr_save_p implies at least one CR field needs restoring.  */
	  if (count == 0)
	    abort ();
	}

      if (using_mfcr_multiple && count > 1)
	{
	  /* One PARALLEL restoring every needed field from r12; the
	     1 << (7-i) operand is the mtcrf field-select mask.  */
	  rtvec p;
	  int ndx;

	  p = rtvec_alloc (count);

	  ndx = 0;
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      {
		rtvec r = rtvec_alloc (2);
		RTVEC_ELT (r, 0) = r12_rtx;
		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
		RTVEC_ELT (p, ndx) =
		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
			       gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
		ndx++;
	      }
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
	  if (ndx != count)
	    abort ();
	}
      else
	/* Restore each needed CR field with a separate insn.  */
	for (i = 0; i < 8; i++)
	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	    {
	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
							   CR0_REGNO+i),
					      r12_rtx));
	    }
    }

  /* If this is V.4, unwind the stack pointer after all of the loads
     have been done.  We need to emit a block here so that sched
     doesn't decide to move the sp change before the register restores
     (which may not have any obvious dependency on the stack).  This
     doesn't hurt performance, because there is no scheduling that can
     be done after this point.  */
  if (DEFAULT_ABI == ABI_V4)
    {
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();

      if (use_backchain_to_restore_sp)
	{
	  emit_move_insn (sp_reg_rtx, frame_reg_rtx);
	}
      else if (sp_offset != 0)
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset)));
	}
    }

  /* Apply the extra stack adjustment demanded by __builtin_eh_return.  */
  if (current_function_calls_eh_return)
    {
      rtx sa = EH_RETURN_STACKADJ_RTX;
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
    }

  /* Emit the return itself (a PARALLEL of the return and a USE of LR),
     unless this epilogue precedes a sibling call, which supplies its
     own return.  */
  if (!sibcall)
    {
      rtvec p;
      if (! restoring_FPRs_inline)
	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
      else
	p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_REG (Pmode,
						   LINK_REGISTER_REGNUM));

      /* If we have to restore more than two FP registers, branch to the
	 restore function.  It will return to our caller.  */
      if (! restoring_FPRs_inline)
	{
	  int i;
	  char rname[30];
	  const char *alloc_rname;

	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
	  alloc_rname = ggc_strdup (rname);
	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
					  gen_rtx_SYMBOL_REF (Pmode,
							      alloc_rname));

	  /* Record the FPR restores performed by the helper so the RTL
	     describes the full effect of the jump.  */
	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	    {
	      rtx addr, mem;
	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
				   GEN_INT (info->fp_save_offset + 8*i));
	      mem = gen_rtx_MEM (DFmode, addr);
	      set_mem_alias_set (mem, rs6000_sr_alias_set);

	      RTVEC_ELT (p, i+3) =
		gen_rtx_SET (VOIDmode,
			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
			     mem);
	    }
	}

      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
12124
12125 /* Write function epilogue. */
12126
12127 static void
12128 rs6000_output_function_epilogue (file, size)
12129 FILE *file;
12130 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
12131 {
12132 rs6000_stack_t *info = rs6000_stack_info ();
12133
12134 if (! HAVE_epilogue)
12135 {
12136 rtx insn = get_last_insn ();
12137 /* If the last insn was a BARRIER, we don't have to write anything except
12138 the trace table. */
12139 if (GET_CODE (insn) == NOTE)
12140 insn = prev_nonnote_insn (insn);
12141 if (insn == 0 || GET_CODE (insn) != BARRIER)
12142 {
12143 /* This is slightly ugly, but at least we don't have two
12144 copies of the epilogue-emitting code. */
12145 start_sequence ();
12146
12147 /* A NOTE_INSN_DELETED is supposed to be at the start
12148 and end of the "toplevel" insn chain. */
12149 emit_note (NOTE_INSN_DELETED);
12150 rs6000_emit_epilogue (FALSE);
12151 emit_note (NOTE_INSN_DELETED);
12152
12153 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12154 {
12155 rtx insn;
12156 unsigned addr = 0;
12157 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12158 {
12159 INSN_ADDRESSES_NEW (insn, addr);
12160 addr += 4;
12161 }
12162 }
12163
12164 if (TARGET_DEBUG_STACK)
12165 debug_rtx_list (get_insns (), 100);
12166 final (get_insns (), file, FALSE, FALSE);
12167 end_sequence ();
12168 }
12169 }
12170
12171 /* Output a traceback table here. See /usr/include/sys/debug.h for info
12172 on its format.
12173
12174 We don't output a traceback table if -finhibit-size-directive was
12175 used. The documentation for -finhibit-size-directive reads
12176 ``don't output a @code{.size} assembler directive, or anything
12177 else that would cause trouble if the function is split in the
12178 middle, and the two halves are placed at locations far apart in
12179 memory.'' The traceback table has this property, since it
12180 includes the offset from the start of the function to the
12181 traceback table itself.
12182
12183 System V.4 Powerpc's (and the embedded ABI derived from it) use a
12184 different traceback table. */
12185 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
12186 && rs6000_traceback != traceback_none)
12187 {
12188 const char *fname = NULL;
12189 const char *language_string = lang_hooks.name;
12190 int fixed_parms = 0, float_parms = 0, parm_info = 0;
12191 int i;
12192 int optional_tbtab;
12193
12194 if (rs6000_traceback == traceback_full)
12195 optional_tbtab = 1;
12196 else if (rs6000_traceback == traceback_part)
12197 optional_tbtab = 0;
12198 else
12199 optional_tbtab = !optimize_size && !TARGET_ELF;
12200
12201 if (optional_tbtab)
12202 {
12203 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
12204 while (*fname == '.') /* V.4 encodes . in the name */
12205 fname++;
12206
12207 /* Need label immediately before tbtab, so we can compute
12208 its offset from the function start. */
12209 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12210 ASM_OUTPUT_LABEL (file, fname);
12211 }
12212
12213 /* The .tbtab pseudo-op can only be used for the first eight
12214 expressions, since it can't handle the possibly variable
12215 length fields that follow. However, if you omit the optional
12216 fields, the assembler outputs zeros for all optional fields
12217 anyways, giving each variable length field is minimum length
12218 (as defined in sys/debug.h). Thus we can not use the .tbtab
12219 pseudo-op at all. */
12220
12221 /* An all-zero word flags the start of the tbtab, for debuggers
12222 that have to find it by searching forward from the entry
12223 point or from the current pc. */
12224 fputs ("\t.long 0\n", file);
12225
12226 /* Tbtab format type. Use format type 0. */
12227 fputs ("\t.byte 0,", file);
12228
12229 /* Language type. Unfortunately, there doesn't seem to be any
12230 official way to get this info, so we use language_string. C
12231 is 0. C++ is 9. No number defined for Obj-C, so use the
12232 value for C for now. There is no official value for Java,
12233 although IBM appears to be using 13. There is no official value
12234 for Chill, so we've chosen 44 pseudo-randomly. */
12235 if (! strcmp (language_string, "GNU C")
12236 || ! strcmp (language_string, "GNU Objective-C"))
12237 i = 0;
12238 else if (! strcmp (language_string, "GNU F77"))
12239 i = 1;
12240 else if (! strcmp (language_string, "GNU Ada"))
12241 i = 3;
12242 else if (! strcmp (language_string, "GNU Pascal"))
12243 i = 2;
12244 else if (! strcmp (language_string, "GNU C++"))
12245 i = 9;
12246 else if (! strcmp (language_string, "GNU Java"))
12247 i = 13;
12248 else if (! strcmp (language_string, "GNU CHILL"))
12249 i = 44;
12250 else
12251 abort ();
12252 fprintf (file, "%d,", i);
12253
12254 /* 8 single bit fields: global linkage (not set for C extern linkage,
12255 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
12256 from start of procedure stored in tbtab, internal function, function
12257 has controlled storage, function has no toc, function uses fp,
12258 function logs/aborts fp operations. */
12259 /* Assume that fp operations are used if any fp reg must be saved. */
12260 fprintf (file, "%d,",
12261 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
12262
12263 /* 6 bitfields: function is interrupt handler, name present in
12264 proc table, function calls alloca, on condition directives
12265 (controls stack walks, 3 bits), saves condition reg, saves
12266 link reg. */
12267 /* The `function calls alloca' bit seems to be set whenever reg 31 is
12268 set up as a frame pointer, even when there is no alloca call. */
12269 fprintf (file, "%d,",
12270 ((optional_tbtab << 6)
12271 | ((optional_tbtab & frame_pointer_needed) << 5)
12272 | (info->cr_save_p << 1)
12273 | (info->lr_save_p)));
12274
12275 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
12276 (6 bits). */
12277 fprintf (file, "%d,",
12278 (info->push_p << 7) | (64 - info->first_fp_reg_save));
12279
12280 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
12281 fprintf (file, "%d,", (32 - first_reg_to_save ()));
12282
12283 if (optional_tbtab)
12284 {
12285 /* Compute the parameter info from the function decl argument
12286 list. */
12287 tree decl;
12288 int next_parm_info_bit = 31;
12289
12290 for (decl = DECL_ARGUMENTS (current_function_decl);
12291 decl; decl = TREE_CHAIN (decl))
12292 {
12293 rtx parameter = DECL_INCOMING_RTL (decl);
12294 enum machine_mode mode = GET_MODE (parameter);
12295
12296 if (GET_CODE (parameter) == REG)
12297 {
12298 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
12299 {
12300 int bits;
12301
12302 float_parms++;
12303
12304 if (mode == SFmode)
12305 bits = 0x2;
12306 else if (mode == DFmode || mode == TFmode)
12307 bits = 0x3;
12308 else
12309 abort ();
12310
12311 /* If only one bit will fit, don't or in this entry. */
12312 if (next_parm_info_bit > 0)
12313 parm_info |= (bits << (next_parm_info_bit - 1));
12314 next_parm_info_bit -= 2;
12315 }
12316 else
12317 {
12318 fixed_parms += ((GET_MODE_SIZE (mode)
12319 + (UNITS_PER_WORD - 1))
12320 / UNITS_PER_WORD);
12321 next_parm_info_bit -= 1;
12322 }
12323 }
12324 }
12325 }
12326
12327 /* Number of fixed point parameters. */
12328 /* This is actually the number of words of fixed point parameters; thus
12329 an 8 byte struct counts as 2; and thus the maximum value is 8. */
12330 fprintf (file, "%d,", fixed_parms);
12331
12332 /* 2 bitfields: number of floating point parameters (7 bits), parameters
12333 all on stack. */
12334 /* This is actually the number of fp registers that hold parameters;
12335 and thus the maximum value is 13. */
12336 /* Set parameters on stack bit if parameters are not in their original
12337 registers, regardless of whether they are on the stack? Xlc
12338 seems to set the bit when not optimizing. */
12339 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
12340
12341 if (! optional_tbtab)
12342 return;
12343
12344 /* Optional fields follow. Some are variable length. */
12345
12346 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
12347 11 double float. */
12348 /* There is an entry for each parameter in a register, in the order that
12349 they occur in the parameter list. Any intervening arguments on the
12350 stack are ignored. If the list overflows a long (max possible length
12351 34 bits) then completely leave off all elements that don't fit. */
12352 /* Only emit this long if there was at least one parameter. */
12353 if (fixed_parms || float_parms)
12354 fprintf (file, "\t.long %d\n", parm_info);
12355
12356 /* Offset from start of code to tb table. */
12357 fputs ("\t.long ", file);
12358 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12359 #if TARGET_AIX
12360 RS6000_OUTPUT_BASENAME (file, fname);
12361 #else
12362 assemble_name (file, fname);
12363 #endif
12364 fputs ("-.", file);
12365 #if TARGET_AIX
12366 RS6000_OUTPUT_BASENAME (file, fname);
12367 #else
12368 assemble_name (file, fname);
12369 #endif
12370 putc ('\n', file);
12371
12372 /* Interrupt handler mask. */
12373 /* Omit this long, since we never set the interrupt handler bit
12374 above. */
12375
12376 /* Number of CTL (controlled storage) anchors. */
12377 /* Omit this long, since the has_ctl bit is never set above. */
12378
12379 /* Displacement into stack of each CTL anchor. */
12380 /* Omit this list of longs, because there are no CTL anchors. */
12381
12382 /* Length of function name. */
12383 if (*fname == '*')
12384 ++fname;
12385 fprintf (file, "\t.short %d\n", (int) strlen (fname));
12386
12387 /* Function name. */
12388 assemble_string (fname, strlen (fname));
12389
12390 /* Register for alloca automatic storage; this is always reg 31.
12391 Only emit this if the alloca bit was set above. */
12392 if (frame_pointer_needed)
12393 fputs ("\t.byte 31\n", file);
12394
12395 fputs ("\t.align 2\n", file);
12396 }
12397 }
12398 \f
12399 /* A C compound statement that outputs the assembler code for a thunk
12400 function, used to implement C++ virtual function calls with
12401 multiple inheritance. The thunk acts as a wrapper around a virtual
12402 function, adjusting the implicit object parameter before handing
12403 control off to the real function.
12404
12405 First, emit code to add the integer DELTA to the location that
12406 contains the incoming first argument. Assume that this argument
12407 contains a pointer, and is the one used to pass the `this' pointer
12408 in C++. This is the incoming argument *before* the function
12409 prologue, e.g. `%o0' on a sparc. The addition must preserve the
12410 values of all other incoming arguments.
12411
12412 After the addition, emit code to jump to FUNCTION, which is a
12413 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
12414 not touch the return address. Hence returning from FUNCTION will
12415 return to whoever called the current `thunk'.
12416
12417 The effect must be as if FUNCTION had been called directly with the
12418 adjusted first argument. This macro is responsible for emitting
12419 all of the code for a thunk function; output_function_prologue()
12420 and output_function_epilogue() are not invoked.
12421
12422 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
12423 been extracted from it.) It might possibly be useful on some
12424 targets, but probably not.
12425
12426 If you do not define this macro, the target-independent code in the
12427 C++ frontend will generate a less efficient heavyweight thunk that
12428 calls FUNCTION instead of jumping to it. The generic approach does
12429 not support varargs. */
12430
static void
rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
     FILE *file;			/* Assembler output stream.  */
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     HOST_WIDE_INT delta;		/* Constant to add to `this'.  */
     HOST_WIDE_INT vcall_offset;	/* Offset into the vtable, or 0.  */
     tree function;			/* Target function to tail-call.  */
{
  rtx this, insn, funexp;

  /* Pretend compilation is past reload so the RTL emitted below is
     accepted with hard registers and no new pseudos are created.  */
  reload_completed = 1;
  epilogue_completed = 1;
  no_new_pseudos = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in r3 and "this" is in r4.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
    this = gen_rtx_REG (Pmode, 4);
  else
    this = gen_rtx_REG (Pmode, 3);

  /* Apply the constant offset, if required.  */
  if (delta)
    {
      rtx delta_rtx = GEN_INT (delta);
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, delta_rtx)
		 : gen_adddi3 (this, this, delta_rtx));
    }

  /* Apply the offset from the vtable, if required.  */
  if (vcall_offset)
    {
      rtx vcall_offset_rtx = GEN_INT (vcall_offset);
      rtx tmp = gen_rtx_REG (Pmode, 12);

      /* Load the vtable pointer into r12.  */
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
      if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
	{
	  /* Offset doesn't fit in a 16-bit displacement; add it in a
	     separate insn and load with a zero displacement.  */
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
		     : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
	  emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
	}
      else
	{
	  rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);

	  emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
	}
      /* Add the vtable-supplied adjustment to "this".  */
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, tmp)
		 : gen_adddi3 (this, this, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);
  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);

#if TARGET_MACHO
  if (MACHOPIC_INDIRECT)
    funexp = machopic_indirect_call_target (funexp);
#endif

  /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
     generate sibcall RTL explicitly to avoid constraint abort.  */
  insn = emit_call_insn (
	   gen_rtx_PARALLEL (VOIDmode,
	     gen_rtvec (4,
			gen_rtx_CALL (VOIDmode,
				      funexp, const0_rtx),
			gen_rtx_USE (VOIDmode, const0_rtx),
			gen_rtx_USE (VOIDmode,
				     gen_rtx_REG (SImode,
						  LINK_REGISTER_REGNUM)),
			gen_rtx_RETURN (VOIDmode))));
  SIBLING_CALL_P (insn) = 1;
  emit_barrier ();

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worth while.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  insn_locators_initialize ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1, 0);
  final_end_function ();

  /* Restore the compiler state disturbed above.  */
  reload_completed = 0;
  epilogue_completed = 0;
  no_new_pseudos = 0;
}
12533 \f
12534 /* A quick summary of the various types of 'constant-pool tables'
12535 under PowerPC:
12536
12537 Target Flags Name One table per
12538 AIX (none) AIX TOC object file
12539 AIX -mfull-toc AIX TOC object file
12540 AIX -mminimal-toc AIX minimal TOC translation unit
12541 SVR4/EABI (none) SVR4 SDATA object file
12542 SVR4/EABI -fpic SVR4 pic object file
12543 SVR4/EABI -fPIC SVR4 PIC translation unit
12544 SVR4/EABI -mrelocatable EABI TOC function
12545 SVR4/EABI -maix AIX TOC object file
12546 SVR4/EABI -maix -mminimal-toc
12547 AIX minimal TOC translation unit
12548
12549 Name Reg. Set by entries contains:
12550 made by addrs? fp? sum?
12551
12552 AIX TOC 2 crt0 as Y option option
12553 AIX minimal TOC 30 prolog gcc Y Y option
12554 SVR4 SDATA 13 crt0 gcc N Y N
12555 SVR4 pic 30 prolog ld Y not yet N
12556 SVR4 PIC 30 prolog gcc Y option option
12557 EABI TOC 30 prolog gcc Y option option
12558
12559 */
12560
/* Hash functions for the hash table.  */

/* Compute a hash value for the RTX constant K, for use as a TOC hash
   table key.  The hash mixes the rtx code and machine mode, then folds
   in every operand by walking K's format string (GET_RTX_FORMAT).
   Recurses into sub-expressions, so equal RTXes hash equally within a
   compilation; the value is not stable across runs (it uses INSN_UIDs
   and host pointer-width arithmetic).  */

static unsigned
rs6000_hash_constant (k)
     rtx k;
{
  enum rtx_code code = GET_CODE (k);
  enum machine_mode mode = GET_MODE (k);
  unsigned result = (code << 3) ^ mode;
  const char *format;
  int flen, fidx;

  format = GET_RTX_FORMAT (code);
  flen = strlen (format);
  fidx = 0;

  /* Special-case a few codes before the generic format walk.  */
  switch (code)
    {
    case LABEL_REF:
      /* Labels hash by the UID of the CODE_LABEL they reference.  */
      return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));

    case CONST_DOUBLE:
      if (mode != VOIDmode)
	/* FP value: hash the REAL_VALUE_TYPE payload directly.  */
	return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
      /* Integer CONST_DOUBLE: only the first two operands carry the
	 value; restrict the walk below accordingly.  */
      flen = 2;
      break;

    case CODE_LABEL:
      /* Skip the chain/UID fields; start hashing at operand 3.  */
      fidx = 3;
      break;

    default:
      break;
    }

  /* Fold each remaining operand into RESULT according to its format
     letter.  The multipliers 613 and 1231 are just odd mixing
     constants.  */
  for (; fidx < flen; fidx++)
    switch (format[fidx])
      {
      case 's':
	{
	  unsigned i, len;
	  const char *str = XSTR (k, fidx);
	  len = strlen (str);
	  result = result * 613 + len;
	  for (i = 0; i < len; i++)
	    result = result * 613 + (unsigned) str[i];
	  break;
	}
      case 'u':
      case 'e':
	/* Sub-expression: recurse.  */
	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
	break;
      case 'i':
      case 'n':
	result = result * 613 + (unsigned) XINT (k, fidx);
	break;
      case 'w':
	/* A HOST_WIDE_INT may be wider than unsigned; if so, fold it
	   in one unsigned-sized chunk at a time.  */
	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
	  result = result * 613 + (unsigned) XWINT (k, fidx);
	else
	  {
	    size_t i;
	    for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
	      result = result * 613 + (unsigned) (XWINT (k, fidx)
						  >> CHAR_BIT * i);
	  }
	break;
      case '0':
	/* Unused/implicit field: contributes nothing.  */
	break;
      default:
	abort ();
      }

  return result;
}
12636
12637 static unsigned
12638 toc_hash_function (hash_entry)
12639 const void * hash_entry;
12640 {
12641 const struct toc_hash_struct *thc =
12642 (const struct toc_hash_struct *) hash_entry;
12643 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
12644 }
12645
12646 /* Compare H1 and H2 for equivalence. */
12647
12648 static int
12649 toc_hash_eq (h1, h2)
12650 const void * h1;
12651 const void * h2;
12652 {
12653 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
12654 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
12655
12656 if (((const struct toc_hash_struct *) h1)->key_mode
12657 != ((const struct toc_hash_struct *) h2)->key_mode)
12658 return 0;
12659
12660 return rtx_equal_p (r1, r2);
12661 }
12662
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.

   Nonzero iff NAME looks like a vtable-related symbol: the old g++
   "_vt." prefix, or the Itanium-ABI mangled prefixes _ZTV (vtable),
   _ZTT (VTT), _ZTC (construction vtable).

   Note: the macro body previously referenced the identifier `name'
   instead of its parameter NAME, so it only worked when the caller
   happened to have a local named `name'.  It now uses its argument.  */

#define VTABLE_NAME_P(NAME) \
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	        \
  || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
  || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
  || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
12673
12674 void
12675 rs6000_output_symbol_ref (file, x)
12676 FILE *file;
12677 rtx x;
12678 {
12679 /* Currently C++ toc references to vtables can be emitted before it
12680 is decided whether the vtable is public or private. If this is
12681 the case, then the linker will eventually complain that there is
12682 a reference to an unknown section. Thus, for vtables only,
12683 we emit the TOC reference to reference the symbol and not the
12684 section. */
12685 const char *name = XSTR (x, 0);
12686
12687 if (VTABLE_NAME_P (name))
12688 {
12689 RS6000_OUTPUT_BASENAME (file, name);
12690 }
12691 else
12692 assemble_name (file, name);
12693 }
12694
/* Output a TOC entry.  We derive the entry name from what is being
   written.

   FILE is the assembler output stream, X the constant (or symbolic
   address) being placed in the TOC, LABELNO the number for the "LC"
   internal label naming this entry, and MODE the mode of X.  Emits
   either a `.tc' directive (full TOC) or raw data directives
   (-mminimal-toc), and suppresses duplicates via toc_hash_table.  */

void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  Don't do this for
     CODE_LABELs.  */
  if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
    {
      struct toc_hash_struct *h;
      void * * found;

      /* Create toc_hash_table.  This can't be done at OVERRIDE_OPTIONS
	 time because GGC is not initialized at that point.  */
      if (toc_hash_table == NULL)
	toc_hash_table = htab_create_ggc (1021, toc_hash_function,
					  toc_hash_eq, NULL);

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
					      found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  /* Emit the "LCn" label that addresses this entry.  */
  (*targetm.asm_out.internal_label) (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
    {
      /* 128-bit long double: four 32-bit target words in k[].  */
      REAL_VALUE_TYPE rv;
      long k[4];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      /* 64-bit double: two 32-bit target words in k[].  */
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      /* 32-bit float: one target word; on 64-bit targets it is
	 stored in the high half of the doubleword.  */
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* Integer constant: split into 32-bit low/high halves,
	 independent of host word size.  */
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  /* Sign-extend the 32-bit value into HIGH.  */
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
	  low = INTVAL (x) & 0xffffffff;
	  high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	{
	  /* Left-justify the value within the Pmode-sized entry
	     (big-endian padding).  */
#if HOST_BITS_PER_WIDE_INT == 32
	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
			 POINTER_SIZE, &low, &high, 0);
#else
	  low |= high << 32;
	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
	  high = (HOST_WIDE_INT) low >> 32;
	  low &= 0xffffffff;
#endif
	}

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      /* Two-word entry on a 32-bit target.  */
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	    }
	  else
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	    }
	  return;
	}
    }

  /* Not a recognized constant: must be a (possibly offset) symbolic
     address.  Split a CONST (PLUS base offset) into BASE and OFFSET.  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      /* The .tc entry name encodes a nonzero offset as ".P<n>" or
	 ".N<n>" so distinct offsets get distinct entries.  */
      fprintf (file, "\t.tc %s", real_name);

      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
12993 \f
/* Output an assembler pseudo-op to write an ASCII string of N characters
   starting at P to FILE.

   On the RS/6000, we have to do this using the .byte operation and
   write out special characters outside the quoted string.
   Also, the assembler is broken; very long strings are truncated,
   so we must artificially break them up early (here, every 512
   characters of quoted string).  Printable characters are emitted
   inside `.byte "..."` runs (with `"` doubled); everything else is
   emitted as a decimal `.byte` value.  */

void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  /* Text to emit before the next quoted character, before the next
     decimal value, and at the end to close an open quoted run.  */
  const char *quote_prefix = "\t.byte \"";
  const char *decimal_prefix = "\t.byte ";
  const char *pending_close = NULL;
  int quoted_len = 0;
  int i;
  char c;

  for (i = 0; i < n; i++)
    {
      c = p[i];
      if (c >= ' ' && c < 0177)
	{
	  /* Printable: open a quoted run if necessary.  */
	  if (quote_prefix)
	    fputs (quote_prefix, file);
	  putc (c, file);

	  /* A literal quote is escaped by doubling it.  */
	  if (c == '"')
	    {
	      putc (c, file);
	      ++quoted_len;
	    }

	  quote_prefix = NULL;
	  decimal_prefix = "\"\n\t.byte ";
	  pending_close = "\"\n";
	  ++quoted_len;

	  /* Break very long quoted runs so the assembler doesn't
	     truncate them.  */
	  if (quoted_len >= 512)
	    {
	      fputs (pending_close, file);

	      quote_prefix = "\t.byte \"";
	      decimal_prefix = "\t.byte ";
	      pending_close = NULL;
	      quoted_len = 0;
	    }
	}
      else
	{
	  /* Non-printable: emit as a decimal .byte operand.  */
	  if (decimal_prefix)
	    fputs (decimal_prefix, file);
	  fprintf (file, "%d", c);

	  quote_prefix = "\n\t.byte \"";
	  decimal_prefix = ", ";
	  pending_close = "\n";
	  quoted_len = 0;
	}
    }

  /* Close any open quoted run, then end the line.  */
  if (pending_close)
    fputs (pending_close, file);
}
13063 \f
/* Generate a unique section name for FILENAME for a section type
   represented by SECTION_DESC.  Output goes into BUF (xmalloc'd;
   caller owns the memory).

   SECTION_DESC can be any string, as long as it is different for each
   possible section type.

   We name the section in the same manner as xlc.  The name begins with
   an underscore followed by the filename (after stripping any leading
   directory names) with the last period replaced by the string
   SECTION_DESC.  If FILENAME does not contain a period, SECTION_DESC is
   appended to the end of the name.  Non-alphanumeric characters in the
   basename are dropped.

   NOTE(review): the last '.' is located anywhere in FILENAME, not just
   in the basename; with a dotted directory and dot-less basename the
   descriptor is silently omitted — preserved as-is, confirm intended.  */

void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *scan, *base, *dot;
  char *out;
  int len;

  /* Locate the basename and the last period.  */
  base = filename;
  dot = 0;
  for (scan = filename; *scan; scan++)
    {
      if (*scan == '/')
	base = scan + 1;
      else if (*scan == '.')
	dot = scan;
    }

  /* '_' + basename (never longer than itself) + descriptor + NUL.  */
  len = strlen (base) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  out = *buf;
  *out++ = '_';

  for (scan = base; *scan; scan++)
    {
      if (scan == dot)
	{
	  /* Replace the final period with the descriptor and stop.  */
	  strcpy (out, section_desc);
	  out += strlen (section_desc);
	  break;
	}

      else if (ISALNUM (*scan))
	*out++ = *scan;
    }

  if (dot == 0)
    strcpy (out, section_desc);	/* No period anywhere: append.  */
  else
    *out = '\0';
}
13119 \f
/* Emit profile function.  Called once per function when profiling is
   enabled; emits the RTL for the mcount call on the ABIs that do it
   from RTL (AIX and Darwin).  LABELNO numbers the per-function "LP"
   counter label on AIX.  Does nothing under -mprofile-kernel (the
   profiler call is emitted elsewhere in that case) or for other ABIs
   (V.4 handles profiling in output_function_profiler).  */

void
output_profile_hook (labelno)
     int labelno ATTRIBUTE_UNUSED;
{
  if (TARGET_PROFILE_KERNEL)
    return;

  if (DEFAULT_ABI == ABI_AIX)
    {
#ifndef NO_PROFILE_COUNTERS
# define NO_PROFILE_COUNTERS 0
#endif
      if (NO_PROFILE_COUNTERS)
	/* No counter word: call mcount with no argument.  */
	emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
      else
	{
	  /* Pass the address of this function's "LPn" counter label
	     to mcount.  */
	  char buf[30];
	  const char *label_name;
	  rtx fun;

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
	  label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
	  fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

	  emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
			     fun, Pmode);
	}
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (MACHOPIC_INDIRECT)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      /* mcount receives the caller's return address as its argument.  */
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
13173
/* Write function profiler code.  Emits the textual assembly for the
   profiling prologue directly to FILE.  LABELNO numbers the "LP"
   counter label.  Used for ABI_V4 (32-bit only) and, under
   -mprofile-kernel, for ABI_AIX/ABI_DARWIN; the plain AIX/Darwin
   case is handled in output_profile_hook instead.  */

void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];
  int save_lr = 8;

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      /* V.4 stores the saved LR at offset 4 in the caller's frame.  */
      save_lr = 4;
      if (!TARGET_32BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* -fpic: fetch the counter-label address from the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  /* -fPIC: compute the label address pc-relatively via a
	     bl/mflr sequence and an inline address word.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: build the label address with lis/la.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      if (!TARGET_PROFILE_KERNEL)
	{
	  /* Don't do anything, done in output_profile_hook ().  */
	}
      else
	{
	  /* -mprofile-kernel is 64-bit only here.  */
	  if (TARGET_32BIT)
	    abort ();

	  asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
	  asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);

	  if (current_function_needs_context)
	    {
	      /* Preserve the static chain register around the
		 mcount call.  */
	      asm_fprintf (file, "\tstd %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	      asm_fprintf (file, "\tld %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	    }
	  else
	    fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	}
      break;
    }
}
13266
13267 \f
/* Scheduler hook: always use the DFA pipeline hazard description
   rather than the old function-unit interface.  */

static int
rs6000_use_dfa_pipeline_interface ()
{
  return 1;
}
13273
13274 /* Power4 load update and store update instructions are cracked into a
13275 load or store and an integer insn which are executed in the same cycle.
13276 Branches have their own dispatch slot which does not count against the
13277 GCC issue rate, but it changes the program flow so there are no other
13278 instructions to issue in this cycle. */
13279
13280 static int
13281 rs6000_variable_issue (stream, verbose, insn, more)
13282 FILE *stream ATTRIBUTE_UNUSED;
13283 int verbose ATTRIBUTE_UNUSED;
13284 rtx insn;
13285 int more;
13286 {
13287 if (GET_CODE (PATTERN (insn)) == USE
13288 || GET_CODE (PATTERN (insn)) == CLOBBER)
13289 return more;
13290
13291 if (rs6000_cpu == PROCESSOR_POWER4)
13292 {
13293 enum attr_type type = get_attr_type (insn);
13294 if (type == TYPE_LOAD_EXT_U || type == TYPE_LOAD_EXT_UX
13295 || type == TYPE_LOAD_UX || type == TYPE_STORE_UX
13296 || type == TYPE_MFCR)
13297 return 0;
13298 else if (type == TYPE_LOAD_U || type == TYPE_STORE_U
13299 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
13300 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
13301 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
13302 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
13303 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
13304 || type == TYPE_IDIV || type == TYPE_LDIV
13305 || type == TYPE_INSERT_WORD)
13306 return more > 2 ? more - 2 : 0;
13307 }
13308
13309 return more - 1;
13310 }
13311
13312 /* Adjust the cost of a scheduling dependency. Return the new cost of
13313 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
13314
13315 static int
13316 rs6000_adjust_cost (insn, link, dep_insn, cost)
13317 rtx insn;
13318 rtx link;
13319 rtx dep_insn ATTRIBUTE_UNUSED;
13320 int cost;
13321 {
13322 if (! recog_memoized (insn))
13323 return 0;
13324
13325 if (REG_NOTE_KIND (link) != 0)
13326 return 0;
13327
13328 if (REG_NOTE_KIND (link) == 0)
13329 {
13330 /* Data dependency; DEP_INSN writes a register that INSN reads
13331 some cycles later. */
13332 switch (get_attr_type (insn))
13333 {
13334 case TYPE_JMPREG:
13335 /* Tell the first scheduling pass about the latency between
13336 a mtctr and bctr (and mtlr and br/blr). The first
13337 scheduling pass will not know about this latency since
13338 the mtctr instruction, which has the latency associated
13339 to it, will be generated by reload. */
13340 return TARGET_POWER ? 5 : 4;
13341 case TYPE_BRANCH:
13342 /* Leave some extra cycles between a compare and its
13343 dependent branch, to inhibit expensive mispredicts. */
13344 if ((rs6000_cpu_attr == CPU_PPC603
13345 || rs6000_cpu_attr == CPU_PPC604
13346 || rs6000_cpu_attr == CPU_PPC604E
13347 || rs6000_cpu_attr == CPU_PPC620
13348 || rs6000_cpu_attr == CPU_PPC630
13349 || rs6000_cpu_attr == CPU_PPC750
13350 || rs6000_cpu_attr == CPU_PPC7400
13351 || rs6000_cpu_attr == CPU_PPC7450
13352 || rs6000_cpu_attr == CPU_POWER4)
13353 && recog_memoized (dep_insn)
13354 && (INSN_CODE (dep_insn) >= 0)
13355 && (get_attr_type (dep_insn) == TYPE_CMP
13356 || get_attr_type (dep_insn) == TYPE_COMPARE
13357 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
13358 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
13359 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
13360 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
13361 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
13362 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
13363 return cost + 2;
13364 default:
13365 break;
13366 }
13367 /* Fall out to return default cost. */
13368 }
13369
13370 return cost;
13371 }
13372
/* A C statement (sans semicolon) to update the integer scheduling
   priority INSN_PRIORITY (INSN).  Reduce the priority to execute the
   INSN earlier, increase the priority to execute INSN later.  Do not
   define this macro if you do not need to adjust the scheduling
   priorities of insns.

   Currently a no-op: returns PRIORITY unchanged.  The #if 0 block
   below is a disabled experiment (note it even prints to stderr) that
   lowered the priority of multiply/divide on the 750; kept for
   reference.  */

static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  return priority;
}
13416
13417 /* Return how many instructions the machine can issue per cycle. */
13418
13419 static int
13420 rs6000_issue_rate ()
13421 {
13422 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
13423 if (!reload_completed)
13424 return 1;
13425
13426 switch (rs6000_cpu_attr) {
13427 case CPU_RIOS1: /* ? */
13428 case CPU_RS64A:
13429 case CPU_PPC601: /* ? */
13430 case CPU_PPC7450:
13431 return 3;
13432 case CPU_PPC440:
13433 case CPU_PPC603:
13434 case CPU_PPC750:
13435 case CPU_PPC7400:
13436 case CPU_PPC8540:
13437 return 2;
13438 case CPU_RIOS2:
13439 case CPU_PPC604:
13440 case CPU_PPC604E:
13441 case CPU_PPC620:
13442 case CPU_PPC630:
13443 case CPU_POWER4:
13444 return 4;
13445 default:
13446 return 1;
13447 }
13448 }
13449
13450 /* Return how many instructions to look ahead for better insn
13451 scheduling. */
13452
13453 static int
13454 rs6000_use_sched_lookahead ()
13455 {
13456 if (rs6000_cpu_attr == CPU_PPC8540)
13457 return 4;
13458 return 0;
13459 }
13460
13461 \f
13462 /* Length in units of the trampoline for entering a nested function. */
13463
13464 int
13465 rs6000_trampoline_size ()
13466 {
13467 int ret = 0;
13468
13469 switch (DEFAULT_ABI)
13470 {
13471 default:
13472 abort ();
13473
13474 case ABI_AIX:
13475 ret = (TARGET_32BIT) ? 12 : 24;
13476 break;
13477
13478 case ABI_DARWIN:
13479 case ABI_V4:
13480 ret = (TARGET_32BIT) ? 40 : 48;
13481 break;
13482 }
13483
13484 return ret;
13485 }
13486
/* Emit RTL insns to initialize the variable parts of a trampoline.
   ADDR is an RTX for the address of the trampoline storage.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.

   On AIX the trampoline is just a 3-word function descriptor copied
   from FNADDR's descriptor plus the static chain; on V.4/eabi/Darwin
   the work is delegated to the __trampoline_setup library routine.  */

void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  int regsize = (TARGET_32BIT) ? 4 : 8;	/* Bytes per pointer word.  */
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	/* Copy the target's entry point and TOC pointer from its
	   descriptor, then append the static chain word.  */
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
13538
13539 \f
/* Table of valid machine attributes.  Both "longcall" and "shortcall"
   apply to function types (type_req and fn_type_req set) and share a
   single handler, which only validates where the attribute appears.
   The table is terminated by the all-NULL sentinel entry.  */

const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "longcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  rs6000_handle_longcall_attribute },
  { NULL,       0, 0, false, false, false, NULL }
};
13549
13550 /* Handle a "longcall" or "shortcall" attribute; arguments as in
13551 struct attribute_spec.handler. */
13552
13553 static tree
13554 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
13555 tree *node;
13556 tree name;
13557 tree args ATTRIBUTE_UNUSED;
13558 int flags ATTRIBUTE_UNUSED;
13559 bool *no_add_attrs;
13560 {
13561 if (TREE_CODE (*node) != FUNCTION_TYPE
13562 && TREE_CODE (*node) != FIELD_DECL
13563 && TREE_CODE (*node) != TYPE_DECL)
13564 {
13565 warning ("`%s' attribute only applies to functions",
13566 IDENTIFIER_POINTER (name));
13567 *no_add_attrs = true;
13568 }
13569
13570 return NULL_TREE;
13571 }
13572
13573 /* Set longcall attributes on all functions declared when
13574 rs6000_default_long_calls is true. */
13575 static void
13576 rs6000_set_default_type_attributes (type)
13577 tree type;
13578 {
13579 if (rs6000_default_long_calls
13580 && (TREE_CODE (type) == FUNCTION_TYPE
13581 || TREE_CODE (type) == METHOD_TYPE))
13582 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
13583 NULL_TREE,
13584 TYPE_ATTRIBUTES (type));
13585 }
13586
13587 /* Return a reference suitable for calling a function with the
13588 longcall attribute. */
13589
13590 struct rtx_def *
13591 rs6000_longcall_ref (call_ref)
13592 rtx call_ref;
13593 {
13594 const char *call_name;
13595 tree node;
13596
13597 if (GET_CODE (call_ref) != SYMBOL_REF)
13598 return call_ref;
13599
13600 /* System V adds '.' to the internal name, so skip them. */
13601 call_name = XSTR (call_ref, 0);
13602 if (*call_name == '.')
13603 {
13604 while (*call_name == '.')
13605 call_name++;
13606
13607 node = get_identifier (call_name);
13608 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
13609 }
13610
13611 return force_reg (Pmode, call_ref);
13612 }
13613 \f
13614 #ifdef USING_ELFOS_H
13615
13616 /* A C statement or statements to switch to the appropriate section
13617 for output of RTX in mode MODE. You can assume that RTX is some
13618 kind of constant in RTL. The argument MODE is redundant except in
13619 the case of a `const_int' rtx. Select the section by calling
13620 `text_section' or one of the alternatives for other sections.
13621
13622 Do not define this macro if you put all constants in the read-only
13623 data section. */
13624
13625 static void
13626 rs6000_elf_select_rtx_section (mode, x, align)
13627 enum machine_mode mode;
13628 rtx x;
13629 unsigned HOST_WIDE_INT align;
13630 {
13631 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13632 toc_section ();
13633 else
13634 default_elf_select_rtx_section (mode, x, align);
13635 }
13636
13637 /* A C statement or statements to switch to the appropriate
13638 section for output of DECL. DECL is either a `VAR_DECL' node
13639 or a constant of some sort. RELOC indicates whether forming
13640 the initial value of DECL requires link-time relocations. */
13641
13642 static void
13643 rs6000_elf_select_section (decl, reloc, align)
13644 tree decl;
13645 int reloc;
13646 unsigned HOST_WIDE_INT align;
13647 {
13648 /* Pretend that we're always building for a shared library when
13649 ABI_AIX, because otherwise we end up with dynamic relocations
13650 in read-only sections. This happens for function pointers,
13651 references to vtables in typeinfo, and probably other cases. */
13652 default_elf_select_section_1 (decl, reloc, align,
13653 flag_pic || DEFAULT_ABI == ABI_AIX);
13654 }
13655
13656 /* A C statement to build up a unique section name, expressed as a
13657 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
13658 RELOC indicates whether the initial value of EXP requires
13659 link-time relocations. If you do not define this macro, GCC will use
13660 the symbol name prefixed by `.' as the section name. Note - this
13661 macro can now be called for uninitialized data items as well as
13662 initialized data and functions. */
13663
13664 static void
13665 rs6000_elf_unique_section (decl, reloc)
13666 tree decl;
13667 int reloc;
13668 {
13669 /* As above, pretend that we're always building for a shared library
13670 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
13671 default_unique_section_1 (decl, reloc,
13672 flag_pic || DEFAULT_ABI == ABI_AIX);
13673 }
13674 \f
13675 /* For a SYMBOL_REF, set generic flags and then perform some
13676 target-specific processing.
13677
13678 When the AIX ABI is requested on a non-AIX system, replace the
13679 function name with the real name (with a leading .) rather than the
13680 function descriptor name. This saves a lot of overriding code to
13681 read the prefixes. */
13682
13683 static void
13684 rs6000_elf_encode_section_info (decl, rtl, first)
13685 tree decl;
13686 rtx rtl;
13687 int first;
13688 {
13689 default_encode_section_info (decl, rtl, first);
13690
13691 if (first
13692 && TREE_CODE (decl) == FUNCTION_DECL
13693 && !TARGET_AIX
13694 && DEFAULT_ABI == ABI_AIX)
13695 {
13696 rtx sym_ref = XEXP (rtl, 0);
13697 size_t len = strlen (XSTR (sym_ref, 0));
13698 char *str = alloca (len + 2);
13699 str[0] = '.';
13700 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
13701 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
13702 }
13703 }
13704
13705 static bool
13706 rs6000_elf_in_small_data_p (decl)
13707 tree decl;
13708 {
13709 if (rs6000_sdata == SDATA_NONE)
13710 return false;
13711
13712 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
13713 {
13714 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
13715 if (strcmp (section, ".sdata") == 0
13716 || strcmp (section, ".sdata2") == 0
13717 || strcmp (section, ".sbss") == 0
13718 || strcmp (section, ".sbss2") == 0
13719 || strcmp (section, ".PPC.EMB.sdata0") == 0
13720 || strcmp (section, ".PPC.EMB.sbss0") == 0)
13721 return true;
13722 }
13723 else
13724 {
13725 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
13726
13727 if (size > 0
13728 && (unsigned HOST_WIDE_INT) size <= g_switch_value
13729 /* If it's not public, and we're not going to reference it there,
13730 there's no need to put it in the small data section. */
13731 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
13732 return true;
13733 }
13734
13735 return false;
13736 }
13737
13738 #endif /* USING_ELFOS_H */
13739
13740 \f
13741 /* Return a REG that occurs in ADDR with coefficient 1.
13742 ADDR can be effectively incremented by incrementing REG.
13743
13744 r0 is special and we must not select it as an address
13745 register by this routine since our caller will try to
13746 increment the returned register via an "la" instruction. */
13747
13748 struct rtx_def *
13749 find_addr_reg (addr)
13750 rtx addr;
13751 {
13752 while (GET_CODE (addr) == PLUS)
13753 {
13754 if (GET_CODE (XEXP (addr, 0)) == REG
13755 && REGNO (XEXP (addr, 0)) != 0)
13756 addr = XEXP (addr, 0);
13757 else if (GET_CODE (XEXP (addr, 1)) == REG
13758 && REGNO (XEXP (addr, 1)) != 0)
13759 addr = XEXP (addr, 1);
13760 else if (CONSTANT_P (XEXP (addr, 0)))
13761 addr = XEXP (addr, 1);
13762 else if (CONSTANT_P (XEXP (addr, 1)))
13763 addr = XEXP (addr, 0);
13764 else
13765 abort ();
13766 }
13767 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
13768 return addr;
13769 abort ();
13770 }
13771
/* Report a fatal "bad address" error for insn/operand OP and stop
   compilation.  */
void
rs6000_fatal_bad_address (op)
     rtx op;
{
  fatal_insn ("bad address", op);
}
13778
13779 #if TARGET_MACHO
13780
#if 0
/* Returns 1 if OP is either a symbol reference or a sum of a symbol
   reference and a constant.  Currently compiled out; retained for
   reference.  */

int
symbolic_operand (op)
     rtx op;
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      op = XEXP (op, 0);
      /* Parenthesize explicitly: the value is either a bare SYMBOL_REF
	 or a (plus (symbol_ref|label_ref) (const_int)).  The original
	 relied on && binding tighter than ||, which draws
	 -Wparentheses; grouping is now explicit (same semantics).  */
      return (GET_CODE (op) == SYMBOL_REF
	      || ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
		   || GET_CODE (XEXP (op, 0)) == LABEL_REF)
		  && GET_CODE (XEXP (op, 1)) == CONST_INT));
    default:
      return 0;
    }
}
#endif
13805
13806 #ifdef RS6000_LONG_BRANCH
13807
/* Chain of TREE_LISTs recording the long-branch stubs generated so
   far: purpose = function name, value = stub label, type = line
   number.  Emitted and cleared by output_compiler_stub.  */
static tree stub_list = 0;
13809
13810 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
13811 procedure calls to the linked list. */
13812
13813 void
13814 add_compiler_stub (label_name, function_name, line_number)
13815 tree label_name;
13816 tree function_name;
13817 int line_number;
13818 {
13819 tree stub = build_tree_list (function_name, label_name);
13820 TREE_TYPE (stub) = build_int_2 (line_number, 0);
13821 TREE_CHAIN (stub) = stub_list;
13822 stub_list = stub;
13823 }
13824
13825 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
13826 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
13827 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
13828
/* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
   handling procedure calls from the linked list and initializes the
   linked list.  */

void
output_compiler_stub ()
{
  char tmp_buf[256];
  char label_buf[256];
  tree stub;

  /* Stubs are only emitted for non-PIC code; stub_list is cleared
     either way.  */
  if (!flag_pic)
    for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
      {
	fprintf (asm_out_file,
		 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	/* Emit a stabs line entry so the debugger can attribute the
	   stub to the recorded call-site line.  */
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */

	/* A leading '*' means the name is already in assembler form;
	   strip it.  Otherwise prepend '_' (the user-label prefix on
	   this target — assumption inherited from surrounding Darwin
	   code).  */
	if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
	  strcpy (label_buf,
		  IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
	else
	  {
	    label_buf[0] = '_';
	    strcpy (label_buf+1,
		    IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
	  }

	/* Materialize the target address in r12 via lis/ori, then
	   branch through the count register.  */
	strcpy (tmp_buf, "lis r12,hi16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	output_asm_insn (tmp_buf, 0);

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      }

  stub_list = 0;
}
13876
13877 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
13878 already there or not. */
13879
13880 int
13881 no_previous_def (function_name)
13882 tree function_name;
13883 {
13884 tree stub;
13885 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13886 if (function_name == STUB_FUNCTION_NAME (stub))
13887 return 0;
13888 return 1;
13889 }
13890
13891 /* GET_PREV_LABEL gets the label name from the previous definition of
13892 the function. */
13893
13894 tree
13895 get_prev_label (function_name)
13896 tree function_name;
13897 {
13898 tree stub;
13899 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13900 if (function_name == STUB_FUNCTION_NAME (stub))
13901 return STUB_LABEL_NAME (stub);
13902 return 0;
13903 }
13904
/* INSN is either a function call or a millicode call.  It may have an
   unconditional jump in its delay slot.

   CALL_DEST is the routine we are calling.  Returns the assembler
   template for the call (pointing at a static buffer).  */

char *
output_call (insn, call_dest, operand_number)
     rtx insn;
     rtx call_dest;
     int operand_number;
{
  static char buf[256];

  /* Direct calls to known symbols under -mlong-branch (non-PIC) go
     through a compiler-generated stub reached with "jbsr".  */
  if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
    {
      tree labelname;
      tree funname = get_identifier (XSTR (call_dest, 0));

      if (no_previous_def (funname))
	{
	  /* First call to this function: make a fresh stub label and
	     register the stub for emission later.  */
	  int line_number = 0;
	  rtx label_rtx = gen_label_rtx ();
	  char *label_buf, temp_buf[256];
	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
				       CODE_LABEL_NUMBER (label_rtx));
	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
	  labelname = get_identifier (label_buf);
	  /* Scan backwards for the nearest NOTE to get a source line
	     for the stub's debug info.  */
	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
	  if (insn)
	    line_number = NOTE_LINE_NUMBER (insn);
	  add_compiler_stub (labelname, funname, line_number);
	}
      else
	labelname = get_prev_label (funname);

      /* "%.246s" bounds the label text so buf[256] cannot overflow.  */
      sprintf (buf, "jbsr %%z%d,%.246s",
	       operand_number, IDENTIFIER_POINTER (labelname));
      return buf;
    }
  else
    {
      sprintf (buf, "bl %%z%d", operand_number);
      return buf;
    }
}
13949
13950 #endif /* RS6000_LONG_BRANCH */
13951
/* Write into BUF the Darwin-style local label "LN$SYMBOL" for SYMBOL
   and label number N.  If SYMBOL already starts with '"', the quote
   is re-opened ahead of the label prefix; if it merely needs quoting
   (name_needs_quotes), the whole label is quoted.  LENGTH is accepted
   for symmetry with the other GEN_*_FOR_SYMBOL macros but is unused
   here.  BUF must have room for SYMBOL plus the label decoration.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
	sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
	sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
	sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
13969
13970
/* Generate PIC and indirect symbol stubs.  Emits to FILE a Darwin
   symbol stub named STUB for target symbol SYMB, plus the associated
   lazy pointer that initially resolves to dyld_stub_binding_helper.  */

void
machopic_output_stub (file, symb, stub)
     FILE *file;
     const char *symb, *stub;
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  /* Per-stub counter, bumped once per invocation.  */
  static int label = 0;

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);

  label += 1;

  /* Build the decorated names; "+ 32" leaves room for the label
     decoration added by the GEN_* macros.  */
  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  local_label_0 = alloca (length + 32);
  GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);

  /* flag_pic == 2 stubs address the lazy pointer pc-relatively.  */
  if (flag_pic == 2)
    machopic_picsymbol_stub1_section ();
  else
    machopic_symbol_stub1_section ();
  fprintf (file, "\t.align 2\n");

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* Obtain the pc in r11 via bcl/mflr (restoring lr afterwards),
	 add the ha16 displacement to the lazy pointer, load it with
	 lwzu, and jump through the count register.  */
      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }
  else
    {
      /* Non-PIC: absolute ha16/lo16 addressing of the lazy pointer.  */
      fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
      fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }

  /* The lazy pointer starts out pointing at the dyld binding helper,
     which resolves the real address on first call.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
14033
/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go into a reg.  This is REG if non
   zero, otherwise we allocate register(s) as necessary.  */

/* True iff X is a CONST_INT fitting a signed 16-bit immediate.  */
#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)

rtx
rs6000_machopic_legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  rtx base, offset;

  /* New pseudos may only be created outside of reload.  */
  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* Already relative to the PIC base register: nothing to do.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  /* Legitimize both operands of the sum recursively.  */
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* During reload we cannot create a register for the
		 large offset; spill the whole constant to memory and
		 legitimize a load of it instead.  */
 	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
14088
/* This is just a placeholder to make linking work without having to
   add this to the generic Darwin EXTRA_SECTIONS.  If -mcall-aix is
   ever needed for Darwin (not too likely!) this would have to get a
   real definition.  */

void
toc_section ()
{
  /* Deliberately empty; see the comment above.  */
}
14098
14099 #endif /* TARGET_MACHO */
14100
14101 #if TARGET_ELF
14102 static unsigned int
14103 rs6000_elf_section_type_flags (decl, name, reloc)
14104 tree decl;
14105 const char *name;
14106 int reloc;
14107 {
14108 unsigned int flags
14109 = default_section_type_flags_1 (decl, name, reloc,
14110 flag_pic || DEFAULT_ABI == ABI_AIX);
14111
14112 if (TARGET_RELOCATABLE)
14113 flags |= SECTION_WRITE;
14114
14115 return flags;
14116 }
14117
14118 /* Record an element in the table of global constructors. SYMBOL is
14119 a SYMBOL_REF of the function to be called; PRIORITY is a number
14120 between 0 and MAX_INIT_PRIORITY.
14121
14122 This differs from default_named_section_asm_out_constructor in
14123 that we have special handling for -mrelocatable. */
14124
14125 static void
14126 rs6000_elf_asm_out_constructor (symbol, priority)
14127 rtx symbol;
14128 int priority;
14129 {
14130 const char *section = ".ctors";
14131 char buf[16];
14132
14133 if (priority != DEFAULT_INIT_PRIORITY)
14134 {
14135 sprintf (buf, ".ctors.%.5u",
14136 /* Invert the numbering so the linker puts us in the proper
14137 order; constructors are run from right to left, and the
14138 linker sorts in increasing order. */
14139 MAX_INIT_PRIORITY - priority);
14140 section = buf;
14141 }
14142
14143 named_section_flags (section, SECTION_WRITE);
14144 assemble_align (POINTER_SIZE);
14145
14146 if (TARGET_RELOCATABLE)
14147 {
14148 fputs ("\t.long (", asm_out_file);
14149 output_addr_const (asm_out_file, symbol);
14150 fputs (")@fixup\n", asm_out_file);
14151 }
14152 else
14153 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
14154 }
14155
/* Record an element in the table of global destructors.  SYMBOL is a
   SYMBOL_REF of the function to be called; PRIORITY is a number
   between 0 and MAX_INIT_PRIORITY.  Mirrors
   rs6000_elf_asm_out_constructor, including the -mrelocatable
   handling.  */
static void
rs6000_elf_asm_out_destructor (symbol, priority)
     rtx symbol;
     int priority;
{
  const char *section = ".dtors";
  char buf[16];

  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".dtors.%.5u",
	       /* Invert the numbering so the linker puts us in the proper
		  order; constructors are run from right to left, and the
		  linker sorts in increasing order.
		  NOTE(review): comment copied from the constructor path;
		  the same inverted encoding is applied to .dtors.  */
	       MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  named_section_flags (section, SECTION_WRITE);
  assemble_align (POINTER_SIZE);

  if (TARGET_RELOCATABLE)
    {
      /* -mrelocatable: emit a @fixup reference patched at load time.  */
      fputs ("\t.long (", asm_out_file);
      output_addr_const (asm_out_file, symbol);
      fputs (")@fixup\n", asm_out_file);
    }
  else
    assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
14186
/* Emit the assembler declaration for function NAME/DECL to FILE:
   the label itself, its type/size directives, and — depending on the
   ABI — a function descriptor or -mrelocatable TOC fixup data.  */
void
rs6000_elf_declare_function_name (file, name, decl)
     FILE *file;
     const char *name;
     tree decl;
{
  if (TARGET_64BIT)
    {
      /* 64-bit ELF: emit an official procedure descriptor in .opd
	 (entry address via the dot-prefixed name, TOC base, and a
	 zero environment word), then the code label itself.  */
      fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
      ASM_OUTPUT_LABEL (file, name);
      fputs (DOUBLE_INT_ASM_OP, file);
      putc ('.', file);
      assemble_name (file, name);
      fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
      assemble_name (file, name);
      fputs (",24\n\t.type\t.", file);
      assemble_name (file, name);
      fputs (",@function\n", file);
      if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
	{
	  fputs ("\t.globl\t.", file);
	  assemble_name (file, name);
	  putc ('\n', file);
	}
      ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
      putc ('.', file);
      ASM_OUTPUT_LABEL (file, name);
      return;
    }

  /* -mrelocatable with a constant pool (or profiling): emit the
     offset from the local TOC label to this function's TOC fixup
     label so the startup code can relocate it.  */
  if (TARGET_RELOCATABLE
      && (get_pool_size () != 0 || current_function_profile)
      && uses_TOC())
    {
      char buf[256];

      (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);

      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      fprintf (file, "\t.long ");
      assemble_name (file, buf);
      putc ('-', file);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
      assemble_name (file, buf);
      putc ('\n', file);
    }

  ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
  ASM_DECLARE_RESULT (file, DECL_RESULT (decl));

  if (DEFAULT_ABI == ABI_AIX)
    {
      /* AIX-style descriptor: the descriptor carries the (possibly
	 dot-stripped) name; it points at the real code and the GOT.  */
      const char *desc_name, *orig_name;

      orig_name = (*targetm.strip_name_encoding) (name);
      desc_name = orig_name;
      while (*desc_name == '.')
	desc_name++;

      if (TREE_PUBLIC (decl))
	fprintf (file, "\t.globl %s\n", desc_name);

      fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
      fprintf (file, "%s:\n", desc_name);
      fprintf (file, "\t.long %s\n", orig_name);
      fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
      /* NOTE(review): this inner test duplicates the enclosing
	 DEFAULT_ABI == ABI_AIX condition and is always true here.  */
      if (DEFAULT_ABI == ABI_AIX)
	fputs ("\t.long 0\n", file);
      fprintf (file, "\t.previous\n");
    }
  ASM_OUTPUT_LABEL (file, name);
}
14259 #endif
14260
14261 #if TARGET_XCOFF
/* Emit the directive making NAME global: the GLOBAL_ASM_OP prefix
   followed by NAME's base name (qualifiers stripped by
   RS6000_OUTPUT_BASENAME) and a newline.  */
static void
rs6000_xcoff_asm_globalize_label (stream, name)
     FILE *stream;
     const char *name;
{
  fputs (GLOBAL_ASM_OP, stream);
  RS6000_OUTPUT_BASENAME (stream, name);
  putc ('\n', stream);
}
14271
14272 static void
14273 rs6000_xcoff_asm_named_section (name, flags)
14274 const char *name;
14275 unsigned int flags;
14276 {
14277 int smclass;
14278 static const char * const suffix[3] = { "PR", "RO", "RW" };
14279
14280 if (flags & SECTION_CODE)
14281 smclass = 0;
14282 else if (flags & SECTION_WRITE)
14283 smclass = 2;
14284 else
14285 smclass = 1;
14286
14287 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
14288 (flags & SECTION_CODE) ? "." : "",
14289 name, suffix[smclass], flags & SECTION_ENTSIZE);
14290 }
14291
14292 static void
14293 rs6000_xcoff_select_section (decl, reloc, align)
14294 tree decl;
14295 int reloc;
14296 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
14297 {
14298 if (decl_readonly_section_1 (decl, reloc, 1))
14299 {
14300 if (TREE_PUBLIC (decl))
14301 read_only_data_section ();
14302 else
14303 read_only_private_data_section ();
14304 }
14305 else
14306 {
14307 if (TREE_PUBLIC (decl))
14308 data_section ();
14309 else
14310 private_data_section ();
14311 }
14312 }
14313
14314 static void
14315 rs6000_xcoff_unique_section (decl, reloc)
14316 tree decl;
14317 int reloc ATTRIBUTE_UNUSED;
14318 {
14319 const char *name;
14320
14321 /* Use select_section for private and uninitialized data. */
14322 if (!TREE_PUBLIC (decl)
14323 || DECL_COMMON (decl)
14324 || DECL_INITIAL (decl) == NULL_TREE
14325 || DECL_INITIAL (decl) == error_mark_node
14326 || (flag_zero_initialized_in_bss
14327 && initializer_zerop (DECL_INITIAL (decl))))
14328 return;
14329
14330 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
14331 name = (*targetm.strip_name_encoding) (name);
14332 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
14333 }
14334
14335 /* Select section for constant in constant pool.
14336
14337 On RS/6000, all constants are in the private read-only data area.
14338 However, if this is being placed in the TOC it must be output as a
14339 toc entry. */
14340
14341 static void
14342 rs6000_xcoff_select_rtx_section (mode, x, align)
14343 enum machine_mode mode;
14344 rtx x;
14345 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
14346 {
14347 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
14348 toc_section ();
14349 else
14350 read_only_private_data_section ();
14351 }
14352
/* Remove any trailing [DS] or the like from the symbol name, and skip
   a leading '*'.  A trailing ']' is assumed to close a four-character
   mapping-class suffix such as "[DS]".  Returns NAME itself when no
   suffix is present, or a fresh GC string otherwise.

   Guards added: the original read name[len - 1] even for an empty
   name (out-of-bounds) and computed len - 4 for names shorter than
   four characters (size_t underflow).  Well-formed names behave
   exactly as before.  */

static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;
  if (*name == '*')
    name++;
  len = strlen (name);
  if (len >= 4 && name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
14368
/* Section attributes.  AIX is always PIC.  Computes the SECTION_*
   flags for NAME/DECL and folds the section alignment (as log2) into
   the SECTION_ENTSIZE bits.  */

static unsigned int
rs6000_xcoff_section_type_flags (decl, name, reloc)
     tree decl;
     const char *name;
     int reloc;
{
  unsigned int align;
  unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);

  /* Align to at least UNIT size.  */
  if (flags & SECTION_CODE)
    align = MIN_UNITS_PER_WORD;
  else
    /* Increase alignment of large objects if not already stricter.
       NOTE(review): assumes DECL is non-null on this path — confirm
       against callers.  */
    align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
		 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
		 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);

  return flags | (exact_log2 (align) & SECTION_ENTSIZE);
}
14391
/* Output at beginning of assembler file.

   Initialize the section names for the RS/6000 at this point.

   Specify filename, including full path, to assembler.

   We want to go into the TOC section so at least one .toc will be emitted.
   Also, in order to output proper .bs/.es pairs, we need at least one static
   [RW] section emitted.

   Finally, declare mcount when profiling to make the assembler happy.  */

static void
rs6000_xcoff_file_start ()
{
  /* Derive per-file section names from the input filename.  */
  rs6000_gen_section_name (&xcoff_bss_section_name,
			   main_input_filename, ".bss_");
  rs6000_gen_section_name (&xcoff_private_data_section_name,
			   main_input_filename, ".rw_");
  rs6000_gen_section_name (&xcoff_read_only_section_name,
			   main_input_filename, ".ro_");

  fputs ("\t.file\t", asm_out_file);
  output_quoted_string (asm_out_file, main_input_filename);
  fputc ('\n', asm_out_file);
  /* Touch the TOC and (when debugging) a static [RW] section so the
     required directives appear at least once.  */
  toc_section ();
  if (write_symbols != NO_DEBUG)
    private_data_section ();
  text_section ();
  if (profile_flag)
    fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
  rs6000_file_start ();
}
14425
14426 /* Output at end of assembler file.
14427 On the RS/6000, referencing data should automatically pull in text. */
14428
14429 static void
14430 rs6000_xcoff_file_end ()
14431 {
14432 text_section ();
14433 fputs ("_section_.text:\n", asm_out_file);
14434 data_section ();
14435 fputs (TARGET_32BIT
14436 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
14437 asm_out_file);
14438 }
14439 #endif /* TARGET_XCOFF */
14440
14441 #if TARGET_MACHO
/* Cross-module name binding.  Darwin does not support overriding
   functions at dynamic-link time.  */

static bool
rs6000_binds_local_p (decl)
     tree decl;
{
  /* shlib argument is 0: treat definitions as binding locally even
     in shared builds, per the comment above.  */
  return default_binds_local_p_1 (decl, 0);
}
14451 #endif
14452
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
rs6000_rtx_costs (x, code, outer_code, total)
     rtx x;
     int code, outer_code ATTRIBUTE_UNUSED;
     int *total;
{
  switch (code)
    {
      /* On the RS/6000, if it is valid in the insn, it is free.
	 So this always returns 0.  */
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case HIGH:
      *total = 0;
      return true;

    case PLUS:
      /* Two insns when the constant operand doesn't fit a signed
	 16-bit immediate and also has nonzero low bits (needs both
	 addis and addi); otherwise one.  */
      *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
		 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
					       + 0x8000) >= 0x10000)
		 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
		? COSTS_N_INSNS (2)
		: COSTS_N_INSNS (1));
      return true;

    case AND:
    case IOR:
    case XOR:
      /* Two insns when the constant has bits set in both 16-bit
	 halves; otherwise one.  */
      *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
		 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
		 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
		? COSTS_N_INSNS (2)
		: COSTS_N_INSNS (1));
      return true;

    case MULT:
      /* Multiply cost is per-processor; several cores multiply small
	 immediates (-256..255) faster, and DImode is slower on the
	 64-bit cores that distinguish it.  */
      if (optimize_size)
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      switch (rs6000_cpu)
	{
	case PROCESSOR_RIOS1:
	case PROCESSOR_PPC405:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (5)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
	  return true;

	case PROCESSOR_PPC440:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (3)
		    : COSTS_N_INSNS (2));
	  return true;

	case PROCESSOR_RS64A:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
	  return true;

	case PROCESSOR_RIOS2:
	case PROCESSOR_MPCCORE:
	case PROCESSOR_PPC604e:
	  *total = COSTS_N_INSNS (2);
	  return true;

	case PROCESSOR_PPC601:
	  *total = COSTS_N_INSNS (5);
	  return true;

	case PROCESSOR_PPC603:
	case PROCESSOR_PPC7400:
	case PROCESSOR_PPC750:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (5)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
	  return true;

	case PROCESSOR_PPC7450:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (4)
		    : COSTS_N_INSNS (3));
	  return true;

	case PROCESSOR_PPC403:
	case PROCESSOR_PPC604:
	case PROCESSOR_PPC8540:
	  *total = COSTS_N_INSNS (4);
	  return true;

	case PROCESSOR_PPC620:
	case PROCESSOR_PPC630:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
	  return true;

	case PROCESSOR_POWER4:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
		    : COSTS_N_INSNS (2));
	  return true;

	default:
	  abort ();
	}

    case DIV:
    case MOD:
      /* Division by a power of two is cheap (shift-based sequence).  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      /* FALLTHRU */

    case UDIV:
    case UMOD:
      /* General division latency, per processor (DImode is slower on
	 the 64-bit cores that distinguish it).  */
      switch (rs6000_cpu)
	{
	case PROCESSOR_RIOS1:
	  *total = COSTS_N_INSNS (19);
	  return true;

	case PROCESSOR_RIOS2:
	  *total = COSTS_N_INSNS (13);
	  return true;

	case PROCESSOR_RS64A:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (65)
		    : COSTS_N_INSNS (67));
	  return true;

	case PROCESSOR_MPCCORE:
	  *total = COSTS_N_INSNS (6);
	  return true;

	case PROCESSOR_PPC403:
	  *total = COSTS_N_INSNS (33);
	  return true;

	case PROCESSOR_PPC405:
	  *total = COSTS_N_INSNS (35);
	  return true;

	case PROCESSOR_PPC440:
	  *total = COSTS_N_INSNS (34);
	  return true;

	case PROCESSOR_PPC601:
	  *total = COSTS_N_INSNS (36);
	  return true;

	case PROCESSOR_PPC603:
	  *total = COSTS_N_INSNS (37);
	  return true;

	case PROCESSOR_PPC604:
	case PROCESSOR_PPC604e:
	  *total = COSTS_N_INSNS (20);
	  return true;

	case PROCESSOR_PPC620:
	case PROCESSOR_PPC630:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (21)
		    : COSTS_N_INSNS (37));
	  return true;

	case PROCESSOR_PPC750:
	case PROCESSOR_PPC8540:
	case PROCESSOR_PPC7400:
	  *total = COSTS_N_INSNS (19);
	  return true;

	case PROCESSOR_PPC7450:
	  *total = COSTS_N_INSNS (23);
	  return true;

	case PROCESSOR_POWER4:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (18)
		    : COSTS_N_INSNS (34));
	  return true;

	default:
	  abort ();
	}

    case FFS:
      *total = COSTS_N_INSNS (4);
      return true;

    case MEM:
      /* MEM should be slightly more expensive than (plus (reg) (const)) */
      *total = 5;
      return true;

    default:
      /* Let the caller scan subexpressions.  */
      return false;
    }
}
14677
14678 /* A C expression returning the cost of moving data from a register of class
14679 CLASS1 to one of CLASS2. */
14680
14681 int
14682 rs6000_register_move_cost (mode, from, to)
14683 enum machine_mode mode;
14684 enum reg_class from, to;
14685 {
14686 /* Moves from/to GENERAL_REGS. */
14687 if (reg_classes_intersect_p (to, GENERAL_REGS)
14688 || reg_classes_intersect_p (from, GENERAL_REGS))
14689 {
14690 if (! reg_classes_intersect_p (to, GENERAL_REGS))
14691 from = to;
14692
14693 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
14694 return (rs6000_memory_move_cost (mode, from, 0)
14695 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
14696
14697 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
14698 else if (from == CR_REGS)
14699 return 4;
14700
14701 else
14702 /* A move will cost one instruction per GPR moved. */
14703 return 2 * HARD_REGNO_NREGS (0, mode);
14704 }
14705
14706 /* Moving between two similar registers is just one instruction. */
14707 else if (reg_classes_intersect_p (to, from))
14708 return mode == TFmode ? 4 : 2;
14709
14710 /* Everything else has to go through GENERAL_REGS. */
14711 else
14712 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
14713 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
14714 }
14715
14716 /* A C expressions returning the cost of moving data of MODE from a register to
14717 or from memory. */
14718
14719 int
14720 rs6000_memory_move_cost (mode, class, in)
14721 enum machine_mode mode;
14722 enum reg_class class;
14723 int in ATTRIBUTE_UNUSED;
14724 {
14725 if (reg_classes_intersect_p (class, GENERAL_REGS))
14726 return 4 * HARD_REGNO_NREGS (0, mode);
14727 else if (reg_classes_intersect_p (class, FLOAT_REGS))
14728 return 4 * HARD_REGNO_NREGS (32, mode);
14729 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
14730 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
14731 else
14732 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
14733 }
14734
14735 /* Return an RTX representing where to find the function value of a
14736 function returning MODE. */
14737 static rtx
14738 rs6000_complex_function_value (enum machine_mode mode)
14739 {
14740 unsigned int regno;
14741 rtx r1, r2;
14742 enum machine_mode inner = GET_MODE_INNER (mode);
14743
14744 if (FLOAT_MODE_P (mode))
14745 regno = FP_ARG_RETURN;
14746 else
14747 {
14748 regno = GP_ARG_RETURN;
14749
14750 /* 32-bit is OK since it'll go in r3/r4. */
14751 if (TARGET_32BIT
14752 && GET_MODE_BITSIZE (inner) >= 32)
14753 return gen_rtx_REG (mode, regno);
14754 }
14755
14756 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
14757 const0_rtx);
14758 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
14759 GEN_INT (GET_MODE_UNIT_SIZE (inner)));
14760 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
14761 }
14762
14763 /* Define how to find the value returned by a function.
14764 VALTYPE is the data type of the value (as a tree).
14765 If the precise function being called is known, FUNC is its FUNCTION_DECL;
14766 otherwise, FUNC is 0.
14767
14768 On the SPE, both FPs and vectors are returned in r3.
14769
14770 On RS/6000 an integer value is in r3 and a floating-point value is in
14771 fp1, unless -msoft-float. */
14772
14773 rtx
14774 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
14775 {
14776 enum machine_mode mode;
14777 unsigned int regno;
14778
14779 if ((INTEGRAL_TYPE_P (valtype)
14780 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
14781 || POINTER_TYPE_P (valtype))
14782 mode = word_mode;
14783 else
14784 mode = TYPE_MODE (valtype);
14785
14786 if (TREE_CODE (valtype) == REAL_TYPE && TARGET_HARD_FLOAT && TARGET_FPRS)
14787 regno = FP_ARG_RETURN;
14788 else if (TREE_CODE (valtype) == COMPLEX_TYPE
14789 && TARGET_HARD_FLOAT
14790 && SPLIT_COMPLEX_ARGS)
14791 return rs6000_complex_function_value (mode);
14792 else if (TREE_CODE (valtype) == VECTOR_TYPE && TARGET_ALTIVEC)
14793 regno = ALTIVEC_ARG_RETURN;
14794 else
14795 regno = GP_ARG_RETURN;
14796
14797 return gen_rtx_REG (mode, regno);
14798 }
14799
14800 /* Define how to find the value returned by a library function
14801 assuming the value has mode MODE. */
14802 rtx
14803 rs6000_libcall_value (enum machine_mode mode)
14804 {
14805 unsigned int regno;
14806
14807 if (GET_MODE_CLASS (mode) == MODE_FLOAT
14808 && TARGET_HARD_FLOAT && TARGET_FPRS)
14809 regno = FP_ARG_RETURN;
14810 else if (ALTIVEC_VECTOR_MODE (mode))
14811 regno = ALTIVEC_ARG_RETURN;
14812 else if (COMPLEX_MODE_P (mode) && SPLIT_COMPLEX_ARGS)
14813 return rs6000_complex_function_value (mode);
14814 else
14815 regno = GP_ARG_RETURN;
14816
14817 return gen_rtx_REG (mode, regno);
14818 }
14819
14820 /* Return true if TYPE is of type __ev64_opaque__. */
14821
14822 static bool
14823 is_ev64_opaque_type (type)
14824 tree type;
14825 {
14826 return (TARGET_SPE
14827 && (type == opaque_V2SI_type_node
14828 || type == opaque_V2SF_type_node
14829 || type == opaque_p_V2SI_type_node
14830 || (TREE_CODE (type) == VECTOR_TYPE
14831 && TYPE_NAME (type)
14832 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
14833 && DECL_NAME (TYPE_NAME (type))
14834 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
14835 "__ev64_opaque__") == 0)));
14836 }
14837
14838 static rtx
14839 rs6000_dwarf_register_span (reg)
14840 rtx reg;
14841 {
14842 unsigned regno;
14843
14844 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
14845 return NULL_RTX;
14846
14847 regno = REGNO (reg);
14848
14849 /* The duality of the SPE register size wreaks all kinds of havoc.
14850 This is a way of distinguishing r0 in 32-bits from r0 in
14851 64-bits. */
14852 return
14853 gen_rtx_PARALLEL (VOIDmode,
14854 BYTES_BIG_ENDIAN
14855 ? gen_rtvec (2,
14856 gen_rtx_REG (SImode, regno + 1200),
14857 gen_rtx_REG (SImode, regno))
14858 : gen_rtvec (2,
14859 gen_rtx_REG (SImode, regno),
14860 gen_rtx_REG (SImode, regno + 1200)));
14861 }
14862
14863 #include "gt-rs6000.h"