invoke.texi (RS/6000 and PowerPC Options): Document -mspe option.
[gcc.git] / gcc / config / rs6000 / rs6000.c
1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
5
6 This file is part of GNU CC.
7
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "obstack.h"
37 #include "tree.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "except.h"
41 #include "function.h"
42 #include "output.h"
43 #include "basic-block.h"
44 #include "integrate.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "hashtab.h"
48 #include "tm_p.h"
49 #include "target.h"
50 #include "target-def.h"
51 #include "langhooks.h"
52 #include "reload.h"
53
54 #ifndef TARGET_NO_PROTOTYPE
55 #define TARGET_NO_PROTOTYPE 0
56 #endif
57
/* Nonzero if constant N lies in the signed 5-bit immediate range
   [-16, 15] and every element of vector X (machine mode Y) is that
   same value — presumably so the constant can be materialized with a
   single splat-immediate; see easy_vector_constant.  */
#define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
				 && easy_vector_same (x, y))

/* As above, but for even N in [16, 30] (0x10..0x1e): such a value is
   M + M for M = N/2, with M in the EASY_VECTOR_15 range, so it can be
   built by a splat followed by an add-to-self.  */
#define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
					  && !((n) & 1) \
					  && easy_vector_same (x, y))
64
65 #define min(A,B) ((A) < (B) ? (A) : (B))
66 #define max(A,B) ((A) > (B) ? (A) : (B))
67
68 /* Target cpu type */
69
70 enum processor_type rs6000_cpu;
71 struct rs6000_cpu_select rs6000_select[3] =
72 {
73 /* switch name, tune arch */
74 { (const char *)0, "--with-cpu=", 1, 1 },
75 { (const char *)0, "-mcpu=", 1, 1 },
76 { (const char *)0, "-mtune=", 1, 0 },
77 };
78
79 /* Size of long double */
80 const char *rs6000_long_double_size_string;
81 int rs6000_long_double_type_size;
82
83 /* Whether -mabi=altivec has appeared */
84 int rs6000_altivec_abi;
85
86 /* Whether VRSAVE instructions should be generated. */
87 int rs6000_altivec_vrsave;
88
89 /* String from -mvrsave= option. */
90 const char *rs6000_altivec_vrsave_string;
91
92 /* Nonzero if we want SPE ABI extensions. */
93 int rs6000_spe_abi;
94
95 /* Whether isel instructions should be generated. */
96 int rs6000_isel;
97
98 /* Whether SPE simd instructions should be generated. */
99 int rs6000_spe;
100
101 /* Nonzero if we have FPRs. */
102 int rs6000_fprs = 1;
103
104 /* String from -misel=. */
105 const char *rs6000_isel_string;
106
107 /* String from -mspe=. */
108 const char *rs6000_spe_string;
109
110 /* Set to nonzero once AIX common-mode calls have been defined. */
111 static GTY(()) int common_mode_defined;
112
113 /* Save information from a "cmpxx" operation until the branch or scc is
114 emitted. */
115 rtx rs6000_compare_op0, rs6000_compare_op1;
116 int rs6000_compare_fp_p;
117
118 /* Label number of label created for -mrelocatable, to call to so we can
119 get the address of the GOT section */
120 int rs6000_pic_labelno;
121
122 #ifdef USING_ELFOS_H
123 /* Which abi to adhere to */
124 const char *rs6000_abi_name = RS6000_ABI_NAME;
125
126 /* Semantics of the small data area */
127 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
128
129 /* Which small data model to use */
130 const char *rs6000_sdata_name = (char *)0;
131
132 /* Counter for labels which are to be placed in .fixup. */
133 int fixuplabelno = 0;
134 #endif
135
136 /* ABI enumeration available for subtarget to use. */
137 enum rs6000_abi rs6000_current_abi;
138
139 /* ABI string from -mabi= option. */
140 const char *rs6000_abi_string;
141
142 /* Debug flags */
143 const char *rs6000_debug_name;
144 int rs6000_debug_stack; /* debug stack applications */
145 int rs6000_debug_arg; /* debug argument handling */
146
147 const char *rs6000_traceback_name;
148 static enum {
149 traceback_default = 0,
150 traceback_none,
151 traceback_part,
152 traceback_full
153 } rs6000_traceback;
154
155 /* Flag to say the TOC is initialized */
156 int toc_initialized;
157 char toc_label_name[10];
158
159 /* Alias set for saves and restores from the rs6000 stack. */
160 static int rs6000_sr_alias_set;
161
162 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
163 The only place that looks at this is rs6000_set_default_type_attributes;
164 everywhere else should rely on the presence or absence of a longcall
165 attribute on the function declaration. */
166 int rs6000_default_long_calls;
167 const char *rs6000_longcall_switch;
168
169 struct builtin_description
170 {
171 /* mask is not const because we're going to alter it below. This
172 nonsense will go away when we rewrite the -march infrastructure
173 to give us more target flag bits. */
174 unsigned int mask;
175 const enum insn_code icode;
176 const char *const name;
177 const enum rs6000_builtins code;
178 };
179
180 static bool rs6000_function_ok_for_sibcall PARAMS ((tree, tree));
181 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
182 static void validate_condition_mode
183 PARAMS ((enum rtx_code, enum machine_mode));
184 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
185 static void rs6000_maybe_dead PARAMS ((rtx));
186 static void rs6000_emit_stack_tie PARAMS ((void));
187 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
188 static rtx spe_synthesize_frame_save PARAMS ((rtx));
189 static bool spe_func_has_64bit_regs_p PARAMS ((void));
190 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
191 unsigned int, int, int));
192 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
193 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
194 static unsigned rs6000_hash_constant PARAMS ((rtx));
195 static unsigned toc_hash_function PARAMS ((const void *));
196 static int toc_hash_eq PARAMS ((const void *, const void *));
197 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
198 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
199 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
200 #ifdef HAVE_GAS_HIDDEN
201 static void rs6000_assemble_visibility PARAMS ((tree, int));
202 #endif
203 static int rs6000_ra_ever_killed PARAMS ((void));
204 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
205 const struct attribute_spec rs6000_attribute_table[];
206 static void rs6000_set_default_type_attributes PARAMS ((tree));
207 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
208 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
209 static void rs6000_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
210 HOST_WIDE_INT, tree));
211 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
212 HOST_WIDE_INT, HOST_WIDE_INT));
213 #if TARGET_ELF
214 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
215 int));
216 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
217 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
218 static void rs6000_elf_select_section PARAMS ((tree, int,
219 unsigned HOST_WIDE_INT));
220 static void rs6000_elf_unique_section PARAMS ((tree, int));
221 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
222 unsigned HOST_WIDE_INT));
223 static void rs6000_elf_encode_section_info PARAMS ((tree, int))
224 ATTRIBUTE_UNUSED;
225 static const char *rs6000_elf_strip_name_encoding PARAMS ((const char *));
226 static bool rs6000_elf_in_small_data_p PARAMS ((tree));
227 #endif
228 #if TARGET_XCOFF
229 static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
230 static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
231 static void rs6000_xcoff_select_section PARAMS ((tree, int,
232 unsigned HOST_WIDE_INT));
233 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
234 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
235 unsigned HOST_WIDE_INT));
236 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
237 static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
238 static void rs6000_xcoff_encode_section_info PARAMS ((tree, int))
239 ATTRIBUTE_UNUSED;
240 #endif
241 #if TARGET_MACHO
242 static bool rs6000_binds_local_p PARAMS ((tree));
243 #endif
244 static int rs6000_use_dfa_pipeline_interface PARAMS ((void));
245 static int rs6000_variable_issue PARAMS ((FILE *, int, rtx, int));
246 static bool rs6000_rtx_costs PARAMS ((rtx, int, int, int *));
247 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
248 static int rs6000_adjust_priority PARAMS ((rtx, int));
249 static int rs6000_issue_rate PARAMS ((void));
250 static int rs6000_use_sched_lookahead PARAMS ((void));
251
252 static void rs6000_init_builtins PARAMS ((void));
253 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
254 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
255 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
256 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
257 static void altivec_init_builtins PARAMS ((void));
258 static void rs6000_common_init_builtins PARAMS ((void));
259
260 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
261 int, enum rs6000_builtins,
262 enum rs6000_builtins));
263 static void spe_init_builtins PARAMS ((void));
264 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
265 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
266 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
267 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
268
269 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
270 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
271 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
272 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
273 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
274 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
275 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
276 static void rs6000_parse_abi_options PARAMS ((void));
277 static void rs6000_parse_vrsave_option PARAMS ((void));
278 static void rs6000_parse_isel_option PARAMS ((void));
279 static void rs6000_parse_spe_option (void);
280 static int first_altivec_reg_to_save PARAMS ((void));
281 static unsigned int compute_vrsave_mask PARAMS ((void));
282 static void is_altivec_return_reg PARAMS ((rtx, void *));
283 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
284 int easy_vector_constant PARAMS ((rtx, enum machine_mode));
285 static int easy_vector_same PARAMS ((rtx, enum machine_mode));
286 static bool is_ev64_opaque_type PARAMS ((tree));
287 static rtx rs6000_dwarf_register_span PARAMS ((rtx));
288
289 /* Hash table stuff for keeping track of TOC entries. */
290
291 struct toc_hash_struct GTY(())
292 {
293 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
294 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
295 rtx key;
296 enum machine_mode key_mode;
297 int labelno;
298 };
299
300 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
301 \f
302 /* Default register names. */
303 char rs6000_reg_names[][8] =
304 {
305 "0", "1", "2", "3", "4", "5", "6", "7",
306 "8", "9", "10", "11", "12", "13", "14", "15",
307 "16", "17", "18", "19", "20", "21", "22", "23",
308 "24", "25", "26", "27", "28", "29", "30", "31",
309 "0", "1", "2", "3", "4", "5", "6", "7",
310 "8", "9", "10", "11", "12", "13", "14", "15",
311 "16", "17", "18", "19", "20", "21", "22", "23",
312 "24", "25", "26", "27", "28", "29", "30", "31",
313 "mq", "lr", "ctr","ap",
314 "0", "1", "2", "3", "4", "5", "6", "7",
315 "xer",
316 /* AltiVec registers. */
317 "0", "1", "2", "3", "4", "5", "6", "7",
318 "8", "9", "10", "11", "12", "13", "14", "15",
319 "16", "17", "18", "19", "20", "21", "22", "23",
320 "24", "25", "26", "27", "28", "29", "30", "31",
321 "vrsave", "vscr",
322 /* SPE registers. */
323 "spe_acc", "spefscr"
324 };
325
326 #ifdef TARGET_REGNAMES
327 static const char alt_reg_names[][8] =
328 {
329 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
330 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
331 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
332 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
333 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
334 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
335 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
336 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
337 "mq", "lr", "ctr", "ap",
338 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
339 "xer",
340 /* AltiVec registers. */
341 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
342 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
343 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
344 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
345 "vrsave", "vscr",
346 /* SPE registers. */
347 "spe_acc", "spefscr"
348 };
349 #endif
350 \f
351 #ifndef MASK_STRICT_ALIGN
352 #define MASK_STRICT_ALIGN 0
353 #endif
354 #ifndef TARGET_PROFILE_KERNEL
355 #define TARGET_PROFILE_KERNEL 0
356 #endif
357
358 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
359 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
360 \f
361 /* Initialize the GCC target structure. */
362 #undef TARGET_ATTRIBUTE_TABLE
363 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
364 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
365 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
366
367 #undef TARGET_ASM_ALIGNED_DI_OP
368 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
369
370 /* Default unaligned ops are only provided for ELF. Find the ops needed
371 for non-ELF systems. */
372 #ifndef OBJECT_FORMAT_ELF
373 #if TARGET_XCOFF
374 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
375 64-bit targets. */
376 #undef TARGET_ASM_UNALIGNED_HI_OP
377 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
378 #undef TARGET_ASM_UNALIGNED_SI_OP
379 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
380 #undef TARGET_ASM_UNALIGNED_DI_OP
381 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
382 #else
383 /* For Darwin. */
384 #undef TARGET_ASM_UNALIGNED_HI_OP
385 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
386 #undef TARGET_ASM_UNALIGNED_SI_OP
387 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
388 #endif
389 #endif
390
391 /* This hook deals with fixups for relocatable code and DI-mode objects
392 in 64-bit code. */
393 #undef TARGET_ASM_INTEGER
394 #define TARGET_ASM_INTEGER rs6000_assemble_integer
395
396 #ifdef HAVE_GAS_HIDDEN
397 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
398 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
399 #endif
400
401 #undef TARGET_ASM_FUNCTION_PROLOGUE
402 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
403 #undef TARGET_ASM_FUNCTION_EPILOGUE
404 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
405
406 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
407 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
408 #undef TARGET_SCHED_VARIABLE_ISSUE
409 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
410
411 #undef TARGET_SCHED_ISSUE_RATE
412 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
413 #undef TARGET_SCHED_ADJUST_COST
414 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
415 #undef TARGET_SCHED_ADJUST_PRIORITY
416 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
417
418 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
419 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
420
421 #undef TARGET_INIT_BUILTINS
422 #define TARGET_INIT_BUILTINS rs6000_init_builtins
423
424 #undef TARGET_EXPAND_BUILTIN
425 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
426
427 #if TARGET_MACHO
428 #undef TARGET_BINDS_LOCAL_P
429 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
430 #endif
431
432 #undef TARGET_ASM_OUTPUT_MI_THUNK
433 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
434
435 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
436 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
437
438 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
439 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
440
441 #undef TARGET_RTX_COSTS
442 #define TARGET_RTX_COSTS rs6000_rtx_costs
443 #undef TARGET_ADDRESS_COST
444 #define TARGET_ADDRESS_COST hook_int_rtx_0
445
446 #undef TARGET_VECTOR_OPAQUE_P
447 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
448
449 #undef TARGET_DWARF_REGISTER_SPAN
450 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
451
452 struct gcc_target targetm = TARGET_INITIALIZER;
453 \f
/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.

   DEFAULT_CPU is the configure-time default processor name (may be
   null/empty); explicit -mcpu=/-mtune= switches, recorded in
   rs6000_select, take precedence over it.  The steps below are
   order-dependent: cpu-table masks first, then explicit-switch
   overrides, then the -m*= string parsers, then subtarget hooks.  */

void
rs6000_override_options (default_cpu)
     const char *default_cpu;
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;

  /* Simplify the entries below by making a mask for any POWER
     variant and any PowerPC variant.  */

#define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
#define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
		       | MASK_PPC_GFXOPT | MASK_POWERPC64)
#define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)

  /* Per-cpu table: each entry names a processor, the scheduling enum
     for it, and the target_flags bits to force on and force off when
     that cpu is selected as the architecture.  */
  static struct ptt
    {
      const char *const name;		/* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;		/* Target flags to enable.  */
      const int target_disable;		/* Target flags to disable.  */
    } const processor_target_table[]
      = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_MASKS},
	 {"power", PROCESSOR_POWER,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power2", PROCESSOR_POWER,
	    MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power3", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS},
	 {"power4", PROCESSOR_POWER4,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS},
	 {"powerpc", PROCESSOR_POWERPC,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"powerpc64", PROCESSOR_POWERPC64,
	    MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"rios", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios1", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc1", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios2", PROCESSOR_RIOS2,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rs64a", PROCESSOR_RS64A,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"401", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"403", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"405", PROCESSOR_PPC405,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"405f", PROCESSOR_PPC405,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"505", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"601", PROCESSOR_PPC601,
	    MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"602", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"ec603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"604", PROCESSOR_PPC604,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"604e", PROCESSOR_PPC604e,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"620", PROCESSOR_PPC620,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS},
	 {"630", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS},
	 {"740", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"750", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7400", PROCESSOR_PPC7400,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7450", PROCESSOR_PPC7450,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"8540", PROCESSOR_PPC8540,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"801", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"821", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"823", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"860", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};

  const size_t ptt_size = ARRAY_SIZE (processor_target_table);

  /* Save current -mmultiple/-mno-multiple status.  */
  int multiple = TARGET_MULTIPLE;
  /* Save current -mstring/-mno-string status.  */
  int string = TARGET_STRING;

  /* Identify the processor type.  rs6000_select holds --with-cpu=,
     -mcpu= and -mtune= in precedence order; later entries override
     earlier ones.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	{
	  for (j = 0; j < ptt_size; j++)
	    if (! strcmp (ptr->string, processor_target_table[j].name))
	      {
		if (ptr->set_tune_p)
		  rs6000_cpu = processor_target_table[j].processor;

		if (ptr->set_arch_p)
		  {
		    target_flags |= processor_target_table[j].target_enable;
		    target_flags &= ~processor_target_table[j].target_disable;
		  }
		break;
	      }

	  /* Fell off the end of the table: the cpu name was not
	     recognized.  */
	  if (j == ptt_size)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  /* Targeting an E500 core turns on isel generation by default
     (-misel= below can still override this).  */
  if (TARGET_E500)
    rs6000_isel = 1;

  /* If we are optimizing big endian systems for space, use the load/store
     multiple and string instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= MASK_MULTIPLE | MASK_STRING;

  /* If -mmultiple or -mno-multiple was explicitly used, don't
     override with the processor default */
  if ((target_flags_explicit & MASK_MULTIPLE) != 0)
    target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;

  /* If -mstring or -mno-string was explicitly used, don't override
     with the processor default.  */
  if ((target_flags_explicit & MASK_STRING) != 0)
    target_flags = (target_flags & ~MASK_STRING) | string;

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode, and causes an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
	{
	  target_flags &= ~MASK_MULTIPLE;
	  /* Only warn when the user asked for it explicitly; stay
	     silent when it merely came from a cpu default.  */
	  if ((target_flags_explicit & MASK_MULTIPLE) != 0)
	    warning ("-mmultiple is not supported on little endian systems");
	}

      if (TARGET_STRING)
	{
	  target_flags &= ~MASK_STRING;
	  if ((target_flags_explicit & MASK_STRING) != 0)
	    warning ("-mstring is not supported on little endian systems");
	}
    }

  /* Set debug flags from the -mdebug= argument string.  */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
	rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
	rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
	rs6000_debug_arg = 1;
      else
	error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  /* -mtraceback= accepts any prefix-match of full/part/no, e.g.
     "partial" matches via the first four characters.  */
  if (rs6000_traceback_name)
    {
      if (! strncmp (rs6000_traceback_name, "full", 4))
	rs6000_traceback = traceback_full;
      else if (! strncmp (rs6000_traceback_name, "part", 4))
	rs6000_traceback = traceback_part;
      else if (! strncmp (rs6000_traceback_name, "no", 2))
	rs6000_traceback = traceback_none;
      else
	error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
	       rs6000_traceback_name);
    }

  /* Set size of long double: default 64 bits, -mlong-double-128 style
     switches may select 128; any other value is rejected.  */
  rs6000_long_double_type_size = 64;
  if (rs6000_long_double_size_string)
    {
      char *tail;
      int size = strtol (rs6000_long_double_size_string, &tail, 10);
      if (*tail != '\0' || (size != 64 && size != 128))
	error ("Unknown switch -mlong-double-%s",
	       rs6000_long_double_size_string);
      else
	rs6000_long_double_type_size = size;
    }

  /* Handle -mabi= options.  */
  rs6000_parse_abi_options ();

  /* Handle -mvrsave= option.  */
  rs6000_parse_vrsave_option ();

  /* Handle -misel= option.  */
  rs6000_parse_isel_option ();

  /* Handle -mspe= option.  */
  rs6000_parse_spe_option ();

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* The e500 does not have string instructions, and we set
     MASK_STRING above when optimizing for size.  */
  if (rs6000_cpu == PROCESSOR_PPC8540 && (target_flags & MASK_STRING) != 0)
    target_flags = target_flags & ~MASK_STRING;

  /* Handle -m(no-)longcall option.  This is a bit of a cheap hack,
     using TARGET_OPTIONS to handle a toggle switch, but we're out of
     bits in target_flags so TARGET_SWITCHES cannot be used.
     Assumption here is that rs6000_longcall_switch points into the
     text of the complete option, rather than being a copy, so we can
     scan back for the presence or absence of the no- modifier.  */
  if (rs6000_longcall_switch)
    {
      const char *base = rs6000_longcall_switch;
      while (base[-1] != 'm') base--;

      /* Any trailing text after "-mlongcall"/"-mno-longcall" (the
	 switch pointer points just past the recognized name) makes
	 the option invalid.  */
      if (*rs6000_longcall_switch != '\0')
	error ("invalid option `%s'", base);
      rs6000_default_long_calls = (base[0] != 'n');
    }

#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

  /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
    {
      if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
	target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
      else
	target_flags |= MASK_AIX_STRUCT_RET;
    }

  /* 128-bit long double on AIX/Darwin uses the IBM extended
     (double-double) representation.  */
  if (TARGET_LONG_DOUBLE_128
      && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
    real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;

  /* Allocate an alias set for register saves & restores from stack.  */
  rs6000_sr_alias_set = new_alias_set ();

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);

  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  /* Set maximum branch target alignment at two instructions, eight bytes.  */
  align_jumps_max_skip = 8;
  align_loops_max_skip = 8;

  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;
}
785
786 /* Handle -misel= option. */
787 static void
788 rs6000_parse_isel_option ()
789 {
790 if (rs6000_isel_string == 0)
791 return;
792 else if (! strcmp (rs6000_isel_string, "yes"))
793 rs6000_isel = 1;
794 else if (! strcmp (rs6000_isel_string, "no"))
795 rs6000_isel = 0;
796 else
797 error ("unknown -misel= option specified: '%s'",
798 rs6000_isel_string);
799 }
800
801 /* Handle -mspe= option. */
802 static void
803 rs6000_parse_spe_option (void)
804 {
805 if (rs6000_spe_string == 0)
806 return;
807 else if (!strcmp (rs6000_spe_string, "yes"))
808 rs6000_spe = 1;
809 else if (!strcmp (rs6000_spe_string, "no"))
810 rs6000_spe = 0;
811 else
812 error ("unknown -mspe= option specified: '%s'", rs6000_spe_string);
813 }
814
815 /* Handle -mvrsave= options. */
816 static void
817 rs6000_parse_vrsave_option ()
818 {
819 /* Generate VRSAVE instructions by default. */
820 if (rs6000_altivec_vrsave_string == 0
821 || ! strcmp (rs6000_altivec_vrsave_string, "yes"))
822 rs6000_altivec_vrsave = 1;
823 else if (! strcmp (rs6000_altivec_vrsave_string, "no"))
824 rs6000_altivec_vrsave = 0;
825 else
826 error ("unknown -mvrsave= option specified: '%s'",
827 rs6000_altivec_vrsave_string);
828 }
829
830 /* Handle -mabi= options. */
831 static void
832 rs6000_parse_abi_options ()
833 {
834 if (rs6000_abi_string == 0)
835 return;
836 else if (! strcmp (rs6000_abi_string, "altivec"))
837 rs6000_altivec_abi = 1;
838 else if (! strcmp (rs6000_abi_string, "no-altivec"))
839 rs6000_altivec_abi = 0;
840 else if (! strcmp (rs6000_abi_string, "spe"))
841 {
842 rs6000_spe_abi = 1;
843 if (!TARGET_SPE_ABI)
844 error ("not configured for ABI: '%s'", rs6000_abi_string);
845 }
846
847 else if (! strcmp (rs6000_abi_string, "no-spe"))
848 rs6000_spe_abi = 0;
849 else
850 error ("unknown ABI specified: '%s'", rs6000_abi_string);
851 }
852
/* Override optimization defaults for -O LEVEL / -Os (SIZE).  This
   target has no per-level adjustments, so the hook is a no-op.  */
void
optimization_options (level, size)
     int level ATTRIBUTE_UNUSED;
     int size ATTRIBUTE_UNUSED;
{
}
859 \f
/* Do anything needed at the start of the asm file.  With
   -fverbose-asm, emit one comment line listing the -mcpu/-mtune
   selections (seeded with DEFAULT_CPU) and, on ELF targets, the
   -msdata/-G settings.  */

void
rs6000_file_start (file, default_cpu)
     FILE *file;
     const char *default_cpu;
{
  size_t i;
  char buffer[80];
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;

  if (flag_verbose_asm)
    {
      /* START initially points at the header text in BUFFER; it is
	 printed in front of the first option and then reset to "" so
	 the header appears at most once.  */
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G %d", start, g_switch_value);
	  start = "";
	}
#endif

      /* An emptied START means at least one item was printed, so the
	 comment line still needs its terminating newline.  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
907 \f
/* Return nonzero if this function is known to have a null epilogue.
   Only meaningful after reload has completed; before that we answer
   0 conservatively.  */

int
direct_return ()
{
  if (reload_completed)
    {
      rs6000_stack_t *info = rs6000_stack_info ();

      /* The epilogue is empty only when no GPRs (first save regno 32),
	 FPRs (64) or AltiVec registers need restoring, neither LR nor
	 CR was saved, no VRSAVE bits are live, and no frame was
	 pushed.  */
      if (info->first_gp_reg_save == 32
	  && info->first_fp_reg_save == 64
	  && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
	  && ! info->lr_save_p
	  && ! info->cr_save_p
	  && info->vrsave_mask == 0
	  && ! info->push_p)
	return 1;
    }

  return 0;
}
929
/* Returns 1 always.  Predicate that places no constraint at all on
   its operand.  */

int
any_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return 1;
}
939
940 /* Returns 1 if op is the count register. */
941 int
942 count_register_operand (op, mode)
943 rtx op;
944 enum machine_mode mode ATTRIBUTE_UNUSED;
945 {
946 if (GET_CODE (op) != REG)
947 return 0;
948
949 if (REGNO (op) == COUNT_REGISTER_REGNUM)
950 return 1;
951
952 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
953 return 1;
954
955 return 0;
956 }
957
958 /* Returns 1 if op is an altivec register. */
959 int
960 altivec_register_operand (op, mode)
961 rtx op;
962 enum machine_mode mode ATTRIBUTE_UNUSED;
963 {
964
965 return (register_operand (op, mode)
966 && (GET_CODE (op) != REG
967 || REGNO (op) > FIRST_PSEUDO_REGISTER
968 || ALTIVEC_REGNO_P (REGNO (op))));
969 }
970
971 int
972 xer_operand (op, mode)
973 rtx op;
974 enum machine_mode mode ATTRIBUTE_UNUSED;
975 {
976 if (GET_CODE (op) != REG)
977 return 0;
978
979 if (XER_REGNO_P (REGNO (op)))
980 return 1;
981
982 return 0;
983 }
984
985 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
986 by such constants completes more quickly. */
987
988 int
989 s8bit_cint_operand (op, mode)
990 rtx op;
991 enum machine_mode mode ATTRIBUTE_UNUSED;
992 {
993 return ( GET_CODE (op) == CONST_INT
994 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
995 }
996
997 /* Return 1 if OP is a constant that can fit in a D field. */
998
999 int
1000 short_cint_operand (op, mode)
1001 rtx op;
1002 enum machine_mode mode ATTRIBUTE_UNUSED;
1003 {
1004 return (GET_CODE (op) == CONST_INT
1005 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1006 }
1007
1008 /* Similar for an unsigned D field. */
1009
1010 int
1011 u_short_cint_operand (op, mode)
1012 rtx op;
1013 enum machine_mode mode ATTRIBUTE_UNUSED;
1014 {
1015 return (GET_CODE (op) == CONST_INT
1016 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1017 }
1018
1019 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1020
1021 int
1022 non_short_cint_operand (op, mode)
1023 rtx op;
1024 enum machine_mode mode ATTRIBUTE_UNUSED;
1025 {
1026 return (GET_CODE (op) == CONST_INT
1027 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1028 }
1029
1030 /* Returns 1 if OP is a CONST_INT that is a positive value
1031 and an exact power of 2. */
1032
1033 int
1034 exact_log2_cint_operand (op, mode)
1035 rtx op;
1036 enum machine_mode mode ATTRIBUTE_UNUSED;
1037 {
1038 return (GET_CODE (op) == CONST_INT
1039 && INTVAL (op) > 0
1040 && exact_log2 (INTVAL (op)) >= 0);
1041 }
1042
/* Returns 1 if OP is a register that is not special (i.e., not MQ,
   ctr, or lr).  */

int
gpc_reg_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Non-REGs that register_operand accepts (e.g. SUBREGs) pass
     through unchecked.  For a REG, accept regnos below MQ (the GPRs
     and FPRs) or from ARG_POINTER_REGNUM up (pseudos and remaining
     fake registers), except the XER.  */
  return (register_operand (op, mode)
	  && (GET_CODE (op) != REG
	      || (REGNO (op) >= ARG_POINTER_REGNUM
		  && !XER_REGNO_P (REGNO (op)))
	      || REGNO (op) < MQ_REGNO));
}
1057
1058 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1059 CR field. */
1060
1061 int
1062 cc_reg_operand (op, mode)
1063 rtx op;
1064 enum machine_mode mode;
1065 {
1066 return (register_operand (op, mode)
1067 && (GET_CODE (op) != REG
1068 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1069 || CR_REGNO_P (REGNO (op))));
1070 }
1071
1072 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1073 CR field that isn't CR0. */
1074
1075 int
1076 cc_reg_not_cr0_operand (op, mode)
1077 rtx op;
1078 enum machine_mode mode;
1079 {
1080 return (register_operand (op, mode)
1081 && (GET_CODE (op) != REG
1082 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1083 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1084 }
1085
1086 /* Returns 1 if OP is either a constant integer valid for a D-field or
1087 a non-special register. If a register, it must be in the proper
1088 mode unless MODE is VOIDmode. */
1089
1090 int
1091 reg_or_short_operand (op, mode)
1092 rtx op;
1093 enum machine_mode mode;
1094 {
1095 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1096 }
1097
1098 /* Similar, except check if the negation of the constant would be
1099 valid for a D-field. */
1100
1101 int
1102 reg_or_neg_short_operand (op, mode)
1103 rtx op;
1104 enum machine_mode mode;
1105 {
1106 if (GET_CODE (op) == CONST_INT)
1107 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1108
1109 return gpc_reg_operand (op, mode);
1110 }
1111
1112 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1113 a non-special register. If a register, it must be in the proper
1114 mode unless MODE is VOIDmode. */
1115
1116 int
1117 reg_or_aligned_short_operand (op, mode)
1118 rtx op;
1119 enum machine_mode mode;
1120 {
1121 if (gpc_reg_operand (op, mode))
1122 return 1;
1123 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1124 return 1;
1125
1126 return 0;
1127 }
1128
1129
1130 /* Return 1 if the operand is either a register or an integer whose
1131 high-order 16 bits are zero. */
1132
1133 int
1134 reg_or_u_short_operand (op, mode)
1135 rtx op;
1136 enum machine_mode mode;
1137 {
1138 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1139 }
1140
1141 /* Return 1 is the operand is either a non-special register or ANY
1142 constant integer. */
1143
1144 int
1145 reg_or_cint_operand (op, mode)
1146 rtx op;
1147 enum machine_mode mode;
1148 {
1149 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1150 }
1151
/* Return 1 if the operand is either a non-special register or ANY
   32-bit signed constant integer.  */

int
reg_or_arith_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT != 32
	      /* On a wider host, bias the value into [0, 2^32) to
		 check it really fits in 32 signed bits.  On a 32-bit
		 host every CONST_INT trivially qualifies.  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
#endif
	      ));
}
1168
/* Return 1 if the operand is either a non-special register or a 32-bit
   signed constant integer valid for 64-bit addition.  */

int
reg_or_add_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT == 32
	      /* A 32-bit host CONST_INT already fits in 32 bits; only
		 reject values too large to split into addis+addi.  */
	      && INTVAL (op) < 0x7fff8000
#else
	      /* Bias into [0, 2^32) to check the value lies in the
		 [-0x80008000, 0x7fff8000) window that the addis+addi
		 pair can reach.  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
1187
1188 /* Return 1 is the operand is either a non-special register or a 32-bit
1189 signed constant integer valid for 64-bit subtraction. */
1190
1191 int
1192 reg_or_sub_cint64_operand (op, mode)
1193 rtx op;
1194 enum machine_mode mode;
1195 {
1196 return (gpc_reg_operand (op, mode)
1197 || (GET_CODE (op) == CONST_INT
1198 #if HOST_BITS_PER_WIDE_INT == 32
1199 && (- INTVAL (op)) < 0x7fff8000
1200 #else
1201 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1202 < 0x100000000ll)
1203 #endif
1204 ));
1205 }
1206
/* Return 1 if the operand is either a non-special register or ANY
   32-bit unsigned constant integer.  */

int
reg_or_logical_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  /* A CONST_INT wider than the host word only happens for
	     modes of more than 32 bits; anything else is a bug.  */
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  /* A negative CONST_INT sign-extends past the host word, so
	     it cannot represent an unsigned 32-bit value here.  */
	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* Require all bits above the low 32 (within MODE) to be zero.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integer CONST_DOUBLEs only arise when the value doesn't fit a
	 host word, and only DImode is expected here.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
1240
1241 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1242
1243 int
1244 got_operand (op, mode)
1245 rtx op;
1246 enum machine_mode mode ATTRIBUTE_UNUSED;
1247 {
1248 return (GET_CODE (op) == SYMBOL_REF
1249 || GET_CODE (op) == CONST
1250 || GET_CODE (op) == LABEL_REF);
1251 }
1252
1253 /* Return 1 if the operand is a simple references that can be loaded via
1254 the GOT (labels involving addition aren't allowed). */
1255
1256 int
1257 got_no_const_operand (op, mode)
1258 rtx op;
1259 enum machine_mode mode ATTRIBUTE_UNUSED;
1260 {
1261 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1262 }
1263
/* Return the number of instructions it takes to form a constant in an
   integer register.  */

static int
num_insns_constant_wide (value)
     HOST_WIDE_INT value;
{
  /* signed constant loadable with {cal|addi} */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* constant loadable with {cau|addis} */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      /* Split VALUE into a sign-extended low 32-bit half and the
	 remaining high part; the "+ 1" terms below account for the
	 shift/or insns that recombine the halves.  */
      HOST_WIDE_INT low  = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
      HOST_WIDE_INT high = value >> 31;

      /* Fits in 32 signed bits: lis + ori.  */
      if (high == 0 || high == -1)
	return 2;

      high >>= 1;

      if (low == 0)
	return num_insns_constant_wide (high) + 1;
      else
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  else
    return 2;
}
1301
/* Return the number of insns needed to materialize constant OP of
   mode MODE in an integer register.  Aborts for anything other than
   CONST_INT or CONST_DOUBLE.  */
int
num_insns_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A genuine 64-bit mask constant can always be built in two
	 insns (load + rotate-and-mask), which may beat the general
	 split.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      /* Cost the SFmode bit image as one 32-bit integer constant.  */
      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT) l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      /* DImode/VOIDmode CONST_DOUBLEs carry the integer value
	 directly; otherwise convert the FP value to its target
	 image first.  */
      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low = l[1 - endian];
	}

      if (TARGET_32BIT)
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* 64-bit: values that sign-extend from the low word only
	     need the low word built.  */
	  if (high == 0 && low >= 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && low < 0)
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  else if (low == 0)
	    return num_insns_constant_wide (high) + 1;

	  else
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
1376
/* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
   register with one instruction per word.  We only do this if we can
   safely read CONST_DOUBLE_{LOW,HIGH}.  */

int
easy_fp_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != CONST_DOUBLE
      || GET_MODE (op) != mode
      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
    return 0;

  /* Consider all constants with -msoft-float to be easy.  */
  if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
      && mode != DImode)
    return 1;

  /* If we are using V.4 style PIC, consider all constants to be hard.  */
  if (flag_pic && DEFAULT_ABI == ABI_V4)
    return 0;

#ifdef TARGET_RELOCATABLE
  /* Similarly if we are using -mrelocatable, consider all constants
     to be hard.  */
  if (TARGET_RELOCATABLE)
    return 0;
#endif

  if (mode == TFmode)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      /* Easy iff each of the four 32-bit words of the target image
	 loads in a single instruction.  */
      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
    }

  else if (mode == DFmode)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
    }

  else if (mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      return num_insns_constant_wide (l) == 1;
    }

  else if (mode == DImode)
    /* On 64-bit, a value with a zero low word is easy; otherwise
       accept anything buildable in at most two insns.  */
    return ((TARGET_POWERPC64
	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
	    || (num_insns_constant (op, DImode) <= 2));

  else if (mode == SImode)
    return 1;
  else
    abort ();
}
1454
1455 /* Return non zero if all elements of a vector have the same value. */
1456
1457 static int
1458 easy_vector_same (op, mode)
1459 rtx op;
1460 enum machine_mode mode ATTRIBUTE_UNUSED;
1461 {
1462 int units, i, cst;
1463
1464 units = CONST_VECTOR_NUNITS (op);
1465
1466 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1467 for (i = 1; i < units; ++i)
1468 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1469 break;
1470 if (i == units)
1471 return 1;
1472 return 0;
1473 }
1474
1475 /* Return 1 if the operand is a CONST_INT and can be put into a
1476 register without using memory. */
1477
1478 int
1479 easy_vector_constant (op, mode)
1480 rtx op;
1481 enum machine_mode mode;
1482 {
1483 int cst, cst2;
1484
1485 if (GET_CODE (op) != CONST_VECTOR
1486 || (!TARGET_ALTIVEC
1487 && !TARGET_SPE))
1488 return 0;
1489
1490 if (zero_constant (op, mode)
1491 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1492 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1493 return 1;
1494
1495 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1496 return 0;
1497
1498 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1499 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1500
1501 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1502 li r0, CONSTANT1
1503 evmergelo r0, r0, r0
1504 li r0, CONSTANT2
1505
1506 I don't know how efficient it would be to allow bigger constants,
1507 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1508 instructions is better than a 64-bit memory load, but I don't
1509 have the e500 timing specs. */
1510 if (TARGET_SPE && mode == V2SImode
1511 && cst >= -0x7fff && cst <= 0x7fff
1512 && cst2 >= -0x7fff && cst <= 0x7fff)
1513 return 1;
1514
1515 if (TARGET_ALTIVEC && EASY_VECTOR_15 (cst, op, mode))
1516 return 1;
1517
1518 if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
1519 return 1;
1520
1521 return 0;
1522 }
1523
1524 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1525
1526 int
1527 easy_vector_constant_add_self (op, mode)
1528 rtx op;
1529 enum machine_mode mode;
1530 {
1531 int cst;
1532
1533 if (!easy_vector_constant (op, mode))
1534 return 0;
1535
1536 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1537
1538 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
1539 }
1540
/* Return the assembler template for loading the easy vector constant
   OPERANDS[1] into vector register OPERANDS[0].  May rewrite
   OPERANDS[1] (and, for SPE, set OPERANDS[2]) to immediate values
   consumed by the template.  */
const char *
output_vec_const_move (operands)
     rtx *operands;
{
  int cst, cst2;
  enum machine_mode mode;
  rtx dest, vec;

  dest = operands[0];
  vec = operands[1];

  cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
  cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
  mode = GET_MODE (dest);

  if (TARGET_ALTIVEC)
    {
      if (zero_constant (vec, mode))
	return "vxor %0,%0,%0";
      else if (EASY_VECTOR_15 (cst, vec, mode))
	{
	  /* Splat the 5-bit signed immediate with the variant
	     matching the element size.  */
	  operands[1] = GEN_INT (cst);
	  switch (mode)
	    {
	    case V4SImode:
	      return "vspltisw %0,%1";
	    case V8HImode:
	      return "vspltish %0,%1";
	    case V16QImode:
	      return "vspltisb %0,%1";
	    default:
	      abort ();
	    }
	}
      else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
	/* Handled by a splitter after reload.  */
	return "#";
      else
	abort ();
    }

  if (TARGET_SPE)
    {
      /* Vector constant 0 is handled as a splitter of V2SI, and in the
	 pattern of V1DI, V4HI, and V2SF.

	 FIXME: We should probably return # and add post reload
	 splitters for these, but this way is so easy ;-).
	 */
      operands[1] = GEN_INT (cst);
      operands[2] = GEN_INT (cst2);
      if (cst == cst2)
	return "li %0,%1\n\tevmergelo %0,%0,%0";
      else
	return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
    }

  abort ();
}
1599
1600 /* Return 1 if the operand is the constant 0. This works for scalars
1601 as well as vectors. */
1602 int
1603 zero_constant (op, mode)
1604 rtx op;
1605 enum machine_mode mode;
1606 {
1607 return op == CONST0_RTX (mode);
1608 }
1609
1610 /* Return 1 if the operand is 0.0. */
1611 int
1612 zero_fp_constant (op, mode)
1613 rtx op;
1614 enum machine_mode mode;
1615 {
1616 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1617 }
1618
/* Return 1 if the operand is in volatile memory.  Note that during
   the RTL generation phase, memory_operand does not return TRUE for
   volatile memory references.  So this function allows us to
   recognize volatile references where its safe.  */

int
volatile_mem_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (!MEM_VOLATILE_P (op))
    return 0;

  if (mode != GET_MODE (op))
    return 0;

  /* After reload, addresses are strictly valid, so memory_operand
     does the full check; while reload runs, check the address
     strictly by hand; before reload, a loosely valid address is
     enough.  */
  if (reload_completed)
    return memory_operand (op, mode);

  if (reload_in_progress)
    return strict_memory_address_p (mode, XEXP (op, 0));

  return memory_address_p (mode, XEXP (op, 0));
}
1646
1647 /* Return 1 if the operand is an offsettable memory operand. */
1648
1649 int
1650 offsettable_mem_operand (op, mode)
1651 rtx op;
1652 enum machine_mode mode;
1653 {
1654 return ((GET_CODE (op) == MEM)
1655 && offsettable_address_p (reload_completed || reload_in_progress,
1656 mode, XEXP (op, 0)));
1657 }
1658
1659 /* Return 1 if the operand is either an easy FP constant (see above) or
1660 memory. */
1661
1662 int
1663 mem_or_easy_const_operand (op, mode)
1664 rtx op;
1665 enum machine_mode mode;
1666 {
1667 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1668 }
1669
1670 /* Return 1 if the operand is either a non-special register or an item
1671 that can be used as the operand of a `mode' add insn. */
1672
1673 int
1674 add_operand (op, mode)
1675 rtx op;
1676 enum machine_mode mode;
1677 {
1678 if (GET_CODE (op) == CONST_INT)
1679 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1680 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1681
1682 return gpc_reg_operand (op, mode);
1683 }
1684
1685 /* Return 1 if OP is a constant but not a valid add_operand. */
1686
1687 int
1688 non_add_cint_operand (op, mode)
1689 rtx op;
1690 enum machine_mode mode ATTRIBUTE_UNUSED;
1691 {
1692 return (GET_CODE (op) == CONST_INT
1693 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1694 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1695 }
1696
/* Return 1 if the operand is a non-special register or a constant that
   can be used as the operand of an OR or XOR insn on the RS/6000.  */

int
logical_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* OPL holds the value's low host word; OPH is only meaningful on
     the CONST_DOUBLE path, where it must be zero.  */
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* A negative masked value on a 32-bit host would sign-extend
	 into the upper half of a wider mode, so it cannot be a
	 16-bit-pattern immediate.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  /* ori/xori take the low 16 bits, oris/xoris the next 16; accept a
     value whose set bits fit entirely in one of those halves.  */
  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
1735
1736 /* Return 1 if C is a constant that is not a logical operand (as
1737 above), but could be split into one. */
1738
1739 int
1740 non_logical_cint_operand (op, mode)
1741 rtx op;
1742 enum machine_mode mode;
1743 {
1744 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1745 && ! logical_operand (op, mode)
1746 && reg_or_logical_cint_operand (op, mode));
1747 }
1748
/* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
   RS/6000.  It is if there are no more than two 1->0 or 0->1 transitions.
   Reject all ones and all zeros, since these should have been optimized
   away and confuse the making of MB and ME.  */

int
mask_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Fail in 64-bit mode if the mask wraps around because the upper
     32-bits of the mask will all be 1s, contrary to GCC's internal view.  */
  if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
    return 0;

  /* We don't change the number of transitions by inverting,
     so make sure we start with the LS bit zero.  */
  if (c & 1)
    c = ~c;

  /* Reject all zeros or all ones.  */
  if (c == 0)
    return 0;

  /* Find the first transition.  */
  lsb = c & -c;

  /* Invert to look for a second transition.  */
  c = ~c;

  /* Erase first transition.  */
  c &= -lsb;

  /* Find the second transition (if any).  */
  lsb = c & -c;

  /* Match if all the bits above are 1's (or c is zero), i.e. there is
     no third transition.  */
  return c == -lsb;
}
1795
/* Return 1 for the PowerPC64 rlwinm corner case: a 32-bit mask whose
   ones wrap around from bit 0 to bit 31 (the case mask_operand
   rejects in 64-bit mode).  */

int
mask_operand_wrap (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Only wrapping masks (both the top and bottom bit set) are handled
     here; everything else belongs to mask_operand.  */
  if ((c & 0x80000001) != 0x80000001)
    return 0;

  c = ~c;
  if (c == 0)
    return 0;

  /* Same transition-counting trick as mask_operand: strip the first
     transition, then require no more than one further transition.  */
  lsb = c & -c;
  c = ~c;
  c &= -lsb;
  lsb = c & -c;
  return c == -lsb;
}
1823
/* Return 1 if the operand is a constant that is a PowerPC64 mask.
   It is if there are no more than one 1->0 or 0->1 transitions.
   Reject all zeros, since zero should have been optimized away and
   confuses the making of MB and ME.  */

int
mask64_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Reject all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the transition, and check that all bits above are 1's.  */
      lsb = c & -c;

      /* Match if all the bits above are 1's (or c is zero), i.e.
	 there is at most the one transition.  */
      return c == -lsb;
    }
  return 0;
}
1857
/* Like mask64_operand, but allow up to three transitions.  This
   predicate is used by insn patterns that generate two rldicl or
   rldicr machine insns.  */

int
mask64_2_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      c = INTVAL (op);

      /* Disallow all zeros.  */
      if (c == 0)
	return 0;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      if (c & 1)
	c = ~c;

      /* Find the first transition.  */
      lsb = c & -c;

      /* Invert to look for a second transition.  */
      c = ~c;

      /* Erase first transition.  */
      c &= -lsb;

      /* Find the second transition.  */
      lsb = c & -c;

      /* Invert to look for a third transition.  */
      c = ~c;

      /* Erase second transition.  */
      c &= -lsb;

      /* Find the third transition (if any).  */
      lsb = c & -c;

      /* Match if all the bits above are 1's (or c is zero), i.e.
	 there is no fourth transition.  */
      return c == -lsb;
    }
  return 0;
}
1908
/* Generates shifts and masks for a pair of rldicl or rldicr insns to
   implement ANDing by the mask IN.  From the GEN_INT calls below:
   OUT[0]/OUT[2] receive the two rotate counts (64-shift and shift)
   and OUT[1]/OUT[3] the two masks -- the consuming insn patterns
   define their exact roles.  Only usable on a 64-bit host.  */
void
build_mask64_2_operands (in, out)
     rtx in;
     rtx *out;
{
#if HOST_BITS_PER_WIDE_INT >= 64
  unsigned HOST_WIDE_INT c, lsb, m1, m2;
  int shift;

  if (GET_CODE (in) != CONST_INT)
    abort ();

  c = INTVAL (in);
  if (c & 1)
    {
      /* Assume c initially something like 0x00fff000000fffff.  The idea
	 is to rotate the word so that the middle ^^^^^^ group of zeros
	 is at the MS end and can be cleared with an rldicl mask.  We then
	 rotate back and clear off the MS        ^^ group of zeros with a
	 second rldicl.  */
      c = ~c;			/*   c == 0xff000ffffff00000 */
      lsb = c & -c;		/* lsb == 0x0000000000100000 */
      m1 = -lsb;		/*  m1 == 0xfffffffffff00000 */
      c = ~c;			/*   c == 0x00fff000000fffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 <<= 64 - shift;	/*  m1 == 0xffffff0000000000 */
      m1 = ~m1;			/*  m1 == 0x000000ffffffffff */
      m2 = ~c;			/*  m2 == 0x00ffffffffffffff */
    }
  else
    {
      /* Assume c initially something like 0xff000f0000000000.  The idea
	 is to rotate the word so that the     ^^^  middle group of zeros
	 is at the LS end and can be cleared with an rldicr mask.  We then
	 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
	 a second rldicr.  */
      lsb = c & -c;		/* lsb == 0x0000010000000000 */
      m2 = -lsb;		/*  m2 == 0xffffff0000000000 */
      c = ~c;			/*   c == 0x00fff0ffffffffff */
      c &= -lsb;		/*   c == 0x00fff00000000000 */
      lsb = c & -c;		/* lsb == 0x0000100000000000 */
      c = ~c;			/*   c == 0xff000fffffffffff */
      c &= -lsb;		/*   c == 0xff00000000000000 */
      shift = 0;
      while ((lsb >>= 1) != 0)
	shift++;		/* shift == 44 on exit from loop */
      m1 = ~c;			/*  m1 == 0x00ffffffffffffff */
      m1 >>= shift;		/*  m1 == 0x0000000000000fff */
      m1 = ~m1;			/*  m1 == 0xfffffffffffff000 */
    }

  /* Note that when we only have two 0->1 and 1->0 transitions, one of the
     masks will be all 1's.  We are guaranteed more than one transition.  */
  out[0] = GEN_INT (64 - shift);
  out[1] = GEN_INT (m1);
  out[2] = GEN_INT (shift);
  out[3] = GEN_INT (m2);
#else
  (void)in;
  (void)out;
  abort ();
#endif
}
1980
1981 /* Return 1 if the operand is either a non-special register or a constant
1982 that can be used as the operand of a PowerPC64 logical AND insn. */
1983
1984 int
1985 and64_operand (op, mode)
1986 rtx op;
1987 enum machine_mode mode;
1988 {
1989 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1990 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1991
1992 return (logical_operand (op, mode) || mask64_operand (op, mode));
1993 }
1994
1995 /* Like the above, but also match constants that can be implemented
1996 with two rldicl or rldicr insns. */
1997
1998 int
1999 and64_2_operand (op, mode)
2000 rtx op;
2001 enum machine_mode mode;
2002 {
2003 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2004 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2005
2006 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2007 }
2008
2009 /* Return 1 if the operand is either a non-special register or a
2010 constant that can be used as the operand of an RS/6000 logical AND insn. */
2011
2012 int
2013 and_operand (op, mode)
2014 rtx op;
2015 enum machine_mode mode;
2016 {
2017 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2018 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2019
2020 return (logical_operand (op, mode) || mask_operand (op, mode));
2021 }
2022
2023 /* Return 1 if the operand is a general register or memory operand. */
2024
2025 int
2026 reg_or_mem_operand (op, mode)
2027 rtx op;
2028 enum machine_mode mode;
2029 {
2030 return (gpc_reg_operand (op, mode)
2031 || memory_operand (op, mode)
2032 || volatile_mem_operand (op, mode));
2033 }
2034
/* Return 1 if the operand is a general register or memory operand without
   pre_inc or pre_dec which produces invalid form of PowerPC lwa
   instruction.  */

int
lwa_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner = op;

  /* After reload a SUBREG wraps the allocated object; examine the
     underlying register or memory directly.  */
  if (reload_completed && GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  /* lwa is a DS-form instruction: no pre-increment/pre-decrement
     addressing, and any constant offset must be a multiple of 4.  */
  return gpc_reg_operand (inner, mode)
    || (memory_operand (inner, mode)
	&& GET_CODE (XEXP (inner, 0)) != PRE_INC
	&& GET_CODE (XEXP (inner, 0)) != PRE_DEC
	&& (GET_CODE (XEXP (inner, 0)) != PLUS
	    || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
	    || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
}
2057
2058 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2059
2060 int
2061 symbol_ref_operand (op, mode)
2062 rtx op;
2063 enum machine_mode mode;
2064 {
2065 if (mode != VOIDmode && GET_MODE (op) != mode)
2066 return 0;
2067
2068 return (GET_CODE (op) == SYMBOL_REF);
2069 }
2070
2071 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2072 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2073
2074 int
2075 call_operand (op, mode)
2076 rtx op;
2077 enum machine_mode mode;
2078 {
2079 if (mode != VOIDmode && GET_MODE (op) != mode)
2080 return 0;
2081
2082 return (GET_CODE (op) == SYMBOL_REF
2083 || (GET_CODE (op) == REG
2084 && (REGNO (op) == LINK_REGISTER_REGNUM
2085 || REGNO (op) == COUNT_REGISTER_REGNUM
2086 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2087 }
2088
2089 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2090 this file and the function is not weakly defined. */
2091
2092 int
2093 current_file_function_operand (op, mode)
2094 rtx op;
2095 enum machine_mode mode ATTRIBUTE_UNUSED;
2096 {
2097 return (GET_CODE (op) == SYMBOL_REF
2098 && (SYMBOL_REF_FLAG (op)
2099 || (op == XEXP (DECL_RTL (current_function_decl), 0)
2100 && ! DECL_WEAK (current_function_decl))));
2101 }
2102
/* Return 1 if this operand is a valid input for a move insn.  The
   tests below are ordered: each one assumes the earlier ones failed,
   so keep them in sequence.  */

int
input_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* Allow easy vector constants.  */
  if (GET_CODE (op) == CONST_VECTOR
      && easy_vector_constant (op, mode))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid.  */
  if (TOC_RELATIVE_EXPR_P (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  return 0;
}
2164
/* Return 1 for an operand in small memory on V.4/eabi, i.e. one that
   can be addressed as a small offset from the small-data base
   register.  Always 0 on non-ELF targets.  */

int
small_data_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if TARGET_ELF
  rtx sym_ref;

  /* These -msdata settings do not use register-relative small-data
     addressing, so nothing qualifies.  */
  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  /* Small data is a System V.4 ABI feature only.  */
  if (DEFAULT_ABI != ABI_V4)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  /* Otherwise the only acceptable shape is (const (plus symbol
     const_int)).  */
  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
	 that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || summand > g_switch_value)
	return 0;

      sym_ref = XEXP (sum, 0);
    }

  /* Small-data symbols carry a leading '@' in their assembler name --
     presumably applied by the section-encoding machinery elsewhere in
     this port (not visible here).  */
  if (*XSTR (sym_ref, 0) != '@')
    return 0;

  return 1;

#else
  return 0;
#endif
}
2213 \f
2214 static int
2215 constant_pool_expr_1 (op, have_sym, have_toc)
2216 rtx op;
2217 int *have_sym;
2218 int *have_toc;
2219 {
2220 switch (GET_CODE(op))
2221 {
2222 case SYMBOL_REF:
2223 if (CONSTANT_POOL_ADDRESS_P (op))
2224 {
2225 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2226 {
2227 *have_sym = 1;
2228 return 1;
2229 }
2230 else
2231 return 0;
2232 }
2233 else if (! strcmp (XSTR (op, 0), toc_label_name))
2234 {
2235 *have_toc = 1;
2236 return 1;
2237 }
2238 else
2239 return 0;
2240 case PLUS:
2241 case MINUS:
2242 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2243 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2244 case CONST:
2245 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2246 case CONST_INT:
2247 return 1;
2248 default:
2249 return 0;
2250 }
2251 }
2252
2253 int
2254 constant_pool_expr_p (op)
2255 rtx op;
2256 {
2257 int have_sym = 0;
2258 int have_toc = 0;
2259 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2260 }
2261
2262 int
2263 toc_relative_expr_p (op)
2264 rtx op;
2265 {
2266 int have_sym = 0;
2267 int have_toc = 0;
2268 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2269 }
2270
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This is used from only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was
   called.  In some cases it is useful to look at this to decide what
   needs to be done.

   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.

   It is always safe for this function to do nothing.  It exists to
   recognize opportunities to optimize the output.

   On RS/6000, first check for the sum of a register with a constant
   integer that is out of range.  If so, generate code to add the
   constant with the low-order 16 bits masked to the register and force
   this result into another register (this can be done with `cau').
   Then generate an address of REG+(CONST&0xffff), allowing for the
   possibility of bit 16 being a one.

   Then check for the sum of a register and something not constant, try to
   load the other things into a register and return the sum.

   Returns NULL_RTX when no transformation applies.  */
rtx
rs6000_legitimize_address (x, oldx, mode)
    rtx x;
    rtx oldx ATTRIBUTE_UNUSED;
    enum machine_mode mode;
{
  /* Case 1: REG + out-of-range constant.  Split the constant into a
     high part added separately and a sign-extended 16-bit low part
     left in the address.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      /* Sign-extend the low 16 bits; the remainder is the high part.  */
      low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
      high_int = INTVAL (XEXP (x, 1)) - low_int;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* Case 2: REG + non-constant.  Force the addend into a register so
     the result is a valid indexed address -- but only for modes that
     permit indexed addressing (cf. rs6000_legitimate_address).  */
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	       || TARGET_POWERPC64
	       || (mode != DFmode && mode != TFmode))
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  /* Case 3: AltiVec modes accept only [reg] or [reg+reg].  */
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  else if (SPE_VECTOR_MODE (mode))
    {
      /* We accept [reg + reg] and [reg + OFFSET].  */

      if (GET_CODE (x) == PLUS)
	{
	  rtx op1 = XEXP (x, 0);
	  rtx op2 = XEXP (x, 1);

	  op1 = force_reg (Pmode, op1);

	  /* Offsets must satisfy SPE_CONST_OFFSET_OK; anything else is
	     forced into a register to yield [reg + reg].  */
	  if (GET_CODE (op2) != REG
	      && (GET_CODE (op2) != CONST_INT
		  || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
	    op2 = force_reg (Pmode, op2);

	  return gen_rtx_PLUS (Pmode, op1, op2);
	}

      return force_reg (Pmode, x);
    }
  /* Case 4: 32-bit ELF, no TOC, no PIC.  Build the address with an
     explicit high/lo_sum pair.  */
  else if (TARGET_ELF
	   && TARGET_32BIT
	   && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Case 5: same idea for Darwin when neither PIC nor dynamic-no-pic
     is in effect.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
#if TARGET_MACHO
	   && ! MACHO_DYNAMIC_NO_PIC_P
#endif
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Case 6: a constant that has been placed in the TOC is addressed
     relative to the TOC register.  */
  else if (TARGET_TOC
	   && CONSTANT_POOL_EXPR_P (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    return NULL_RTX;
}
2398
2399 /* The convention appears to be to define this wherever it is used.
2400 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2401 is now used here. */
2402 #ifndef REG_MODE_OK_FOR_BASE_P
2403 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2404 #endif
2405
/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
   replace the input X, or the original X if no replacement is called for.
   The output parameter *WIN is 1 if the calling macro should goto WIN,
   0 if it should not.

   For RS/6000, we wish to handle large displacements off a base
   register by splitting the addend across an addiu/addis and the mem insn.
   This cuts number of extra insns needed from 3 to 1.

   On Darwin, we use this to generate code for floating point constants.
   A movsf_low is generated so we wind up with 2 instructions rather than 3.
   The Darwin code is inside #if TARGET_MACHO because only then is
   machopic_function_base_name() defined.  */
rtx
rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
    rtx x;
    enum machine_mode mode;
    int opnum;
    int type;
    int ind_levels ATTRIBUTE_UNUSED;
    int *win;
{
  /* We must recognize output that we have already generated ourselves:
     (plus (plus reg const) const), produced by the displacement-split
     case below.  Reload the inner sum as the base.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  /* Recognize the lo_sum form this function itself built for a Darwin
     PIC floating-point constant (see the SYMBOL_REF case below).  */
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* REG + large constant: split the displacement so the high part is
     added into a base register and the 16-bit low part stays in the
     memory reference.  Vector modes are excluded because their
     addressing forms differ (see rs6000_legitimize_address).  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !SPE_VECTOR_MODE (mode)
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
	{
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
	 in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
				      GEN_INT (high)),
			GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && flag_pic)
    {
      /* Darwin load of floating point constant.  */
      rtx offset = gen_rtx (CONST, Pmode,
		    gen_rtx (MINUS, Pmode, x,
		      gen_rtx (SYMBOL_REF, Pmode,
			machopic_function_base_name ())));
      x = gen_rtx (LO_SUM, GET_MODE (x),
	    gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
	      gen_rtx (HIGH, Pmode, offset)), offset);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && MACHO_DYNAMIC_NO_PIC_P)
    {
      /* Darwin load of floating point constant.  */
      x = gen_rtx (LO_SUM, GET_MODE (x),
		   gen_rtx (HIGH, Pmode, x), x);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* A TOC-resident constant pool address needs no reload at all; just
     rewrite it as a TOC reference.  */
  if (TARGET_TOC
      && CONSTANT_POOL_EXPR_P (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  *win = 0;
  return x;
}
2543
/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
   that is a valid memory address for an instruction.
   The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   On the RS/6000, there are four valid address: a SYMBOL_REF that
   refers to a constant pool entry of an address (or the sum of it
   plus a constant), a short (16-bit signed) constant plus a register,
   the sum of two registers, or a register indirect, possibly with an
   auto-increment.  For DFmode and DImode with a constant plus register,
   we must ensure that both words are addressable or PowerPC64 with offset
   word aligned.

   For modes spanning multiple registers (DFmode in 32-bit GPRs,
   32-bit DImode, TImode), indexed addressing cannot be used because
   adjacent memory cells are accessed by adding word-sized offsets
   during assembly output.

   REG_OK_STRICT is nonzero after reload: only hard registers that are
   valid bases are then accepted.  */
int
rs6000_legitimate_address (mode, x, reg_ok_strict)
    enum machine_mode mode;
    rtx x;
    int reg_ok_strict;
{
  /* Plain register indirect.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
    return 1;
  /* Pre-increment/decrement, for modes and targets that support the
     update forms.  */
  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
      && !ALTIVEC_VECTOR_MODE (mode)
      && !SPE_VECTOR_MODE (mode)
      && TARGET_UPDATE
      && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
    return 1;
  /* V.4 small-data reference.  */
  if (LEGITIMATE_SMALL_DATA_P (mode, x))
    return 1;
  /* TOC-relative constant pool address.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    return 1;
  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
  if (! reg_ok_strict
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;
  /* Base register plus 16-bit offset.  */
  if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
    return 1;
  /* Indexed (reg+reg), except for modes that span multiple registers
     -- see the head comment.  */
  if (mode != TImode
      && ((TARGET_HARD_FLOAT && TARGET_FPRS)
	  || TARGET_POWERPC64
	  || (mode != DFmode && mode != TFmode))
      && (TARGET_POWERPC64 || mode != DImode)
      && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
    return 1;
  /* (lo_sum reg sym) addresses.  */
  if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
    return 1;
  return 0;
}
2599 \f
/* Try to output insns to set TARGET equal to the constant C if it can
   be done in less than N insns.  Do all computations in MODE.
   Returns the place where the output has been placed if it can be
   done and the insns have been emitted.  If it would take more than N
   insns, zero is returned and no insns and emitted.

   NOTE(review): N is currently ignored (ATTRIBUTE_UNUSED) -- every
   request is emitted unconditionally.  */

rtx
rs6000_emit_set_const (dest, mode, source, n)
     rtx dest, source;
     enum machine_mode mode;
     int n ATTRIBUTE_UNUSED;
{
  rtx result, insn, set;
  HOST_WIDE_INT c0, c1;

  /* QImode/HImode constants always fit in a single move.  */
  if (mode == QImode || mode == HImode)
    {
      if (dest == NULL)
	dest = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
      return dest;
    }
  else if (mode == SImode)
    {
      result = no_new_pseudos ? dest : gen_reg_rtx (SImode);

      /* Load the high 16 bits first, then OR in the low 16 bits.  */
      emit_insn (gen_rtx_SET (VOIDmode, result,
			      GEN_INT (INTVAL (source)
				       & (~ (HOST_WIDE_INT) 0xffff))));
      emit_insn (gen_rtx_SET (VOIDmode, dest,
			      gen_rtx_IOR (SImode, result,
					   GEN_INT (INTVAL (source) & 0xffff))));
      result = dest;
    }
  else if (mode == DImode)
    {
      /* Split SOURCE into low word C0 and high word C1, then hand off
	 to the DImode worker.  */
      if (GET_CODE (source) == CONST_INT)
	{
	  c0 = INTVAL (source);
	  c1 = -(c0 < 0);	/* Sign-extend into the high word.  */
	}
      else if (GET_CODE (source) == CONST_DOUBLE)
	{
#if HOST_BITS_PER_WIDE_INT >= 64
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = -(c0 < 0);
#else
	  c0 = CONST_DOUBLE_LOW (source);
	  c1 = CONST_DOUBLE_HIGH (source);
#endif
	}
      else
	abort ();

      result = rs6000_emit_set_long_const (dest, c0, c1);
    }
  else
    abort ();

  /* Record the full constant on the last insn for later passes.
     NOTE(review): single_set is assumed to succeed on the last emitted
     insn here -- confirm; a NULL SET would crash CONSTANT_P.  */
  insn = get_last_insn ();
  set = single_set (insn);
  if (! CONSTANT_P (SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, source);

  return result;
}
2666
/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
   fall back to a straight forward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with rs6000_emit_set_const.

   C1 holds the low word of the value (and, on 64-bit hosts, the whole
   value); C2 holds the high word.  Returns DEST.  */
static rtx
rs6000_emit_set_long_const (dest, c1, c2)
  rtx dest;
  HOST_WIDE_INT c1, c2;
{
  if (!TARGET_POWERPC64)
    {
      /* 32-bit target: simply set each word of the DImode register
	 separately, low word from C1 and high word from C2.  */
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      /* Split the value into four 16-bit chunks, ud1 the lowest.  */
      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Value is a sign-extended 16-bit constant: one insn.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) -  0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}

      /* Value is a sign-extended 32-bit constant: load the upper half,
	 then OR in the low 16 bits if nonzero.  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* Upper 32 bits are a sign-extension of bit 31: build the top
	 three chunks in the low half, shift left 16, OR in ud1.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* General case: build the high 32 bits, shift left 32, then OR
	 in the two low chunks.  Up to five insns.  */
      else
	{
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }
  return dest;
}
2755
/* Emit a move from SOURCE to DEST in mode MODE.  Expands the various
   operand combinations (constants into the TOC or constant pool,
   small-data references, descriptor symbols, vector constants, TImode
   block moves) into forms the machine description can match, ending
   with a single SET unless an earlier special case returns first.  */
void
rs6000_emit_move (dest, source, mode)
     rtx dest;
     rtx source;
     enum machine_mode mode;
{
  rtx operands[2];
  operands[0] = dest;
  operands[1] = source;

  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      /* FIXME.  This should never happen.  */
      /* Since it seems that it does, do the safe thing and convert
	 to a CONST_INT.  */
      operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
    }
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && ((CONST_DOUBLE_HIGH (operands[1]) == 0
	   && CONST_DOUBLE_LOW (operands[1]) >= 0)
	  || (CONST_DOUBLE_HIGH (operands[1]) == -1
	      && CONST_DOUBLE_LOW (operands[1]) < 0)))
    abort ();

  /* Check if GCC is setting up a block move that will end up using FP
     registers as temporaries.  We must make sure this is acceptable.
     If not, split the DImode mem-to-mem copy into two aligned SImode
     moves instead.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) == MEM
      && mode == DImode
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
					    ? 32 : MEM_ALIGN (operands[0])))
	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
					       ? 32
					       : MEM_ALIGN (operands[1]))))
      && ! MEM_VOLATILE_P (operands [0])
      && ! MEM_VOLATILE_P (operands [1]))
    {
      emit_move_insn (adjust_address (operands[0], SImode, 0),
		      adjust_address (operands[1], SImode, 0));
      emit_move_insn (adjust_address (operands[0], SImode, 4),
		      adjust_address (operands[1], SImode, 4));
      return;
    }

  if (!no_new_pseudos)
    {
      /* Load sub-word memory sources as a zero-extended full word and
	 take the low part.  NOTE(review): rationale (helping later
	 optimization see the extension) inferred -- confirm against
	 the machine description.  */
      if (GET_CODE (operands[1]) == MEM && optimize > 0
	  && (mode == QImode || mode == HImode || mode == SImode)
	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
	{
	  rtx reg = gen_reg_rtx (word_mode);

	  emit_insn (gen_rtx_SET (word_mode, reg,
				  gen_rtx_ZERO_EXTEND (word_mode,
						       operands[1])));
	  operands[1] = gen_lowpart (mode, reg);
	}
      /* Storing to memory: the source must be a register.  */
      if (GET_CODE (operands[0]) != REG)
	operands[1] = force_reg (mode, operands[1]);
    }

  if (mode == SFmode && ! TARGET_POWERPC
      && TARGET_HARD_FLOAT && TARGET_FPRS
      && GET_CODE (operands[0]) == MEM)
    {
      int regnum;

      if (reload_in_progress || reload_completed)
	regnum = true_regnum (operands[1]);
      else if (GET_CODE (operands[1]) == REG)
	regnum = REGNO (operands[1]);
      else
	regnum = -1;

      /* If operands[1] is a register, on POWER it may have
	 double-precision data in it, so truncate it to single
	 precision.  */
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
	{
	  rtx newreg;
	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
	  operands[1] = newreg;
	}
    }

  /* Handle the case where reload calls us with an invalid address;
     and the case of CONSTANT_P_RTX.  Emit the SET as-is and let later
     passes sort it out.  */
  if (!ALTIVEC_VECTOR_MODE (mode)
      && (! general_operand (operands[1], mode)
	  || ! nonimmediate_operand (operands[0], mode)
	  || GET_CODE (operands[1]) == CONSTANT_P_RTX))
    {
      emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
      return;
    }

  /* FIXME:  In the long term, this switch statement should go away
     and be replaced by a sequence of tests based on things like
     mode == Pmode.  */
  switch (mode)
    {
    case HImode:
    case QImode:
      /* Only CONST_INT immediates are directly movable; push anything
	 else to the constant pool.  */
      if (CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != CONST_INT)
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case TFmode:
    case DFmode:
    case SFmode:
      /* FP constants that are not "easy" must come from memory.  */
      if (CONSTANT_P (operands[1])
	  && ! easy_fp_constant (operands[1], mode))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
    case V4HImode:
    case V2SFmode:
    case V2SImode:
    case V1DImode:
      /* Likewise for vector constants.  */
      if (CONSTANT_P (operands[1])
	  && !easy_vector_constant (operands[1], mode))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case SImode:
    case DImode:
      /* Use default pattern for address of ELF small data.  */
      if (TARGET_ELF
	  && mode == Pmode
	  && DEFAULT_ABI == ABI_V4
	  && (GET_CODE (operands[1]) == SYMBOL_REF
	      || GET_CODE (operands[1]) == CONST)
	  && small_data_operand (operands[1], mode))
	{
	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	  return;
	}

      /* V.4 small-model PIC: load the address via the GOT.  */
      if (DEFAULT_ABI == ABI_V4
	  && mode == Pmode && mode == SImode
	  && flag_pic == 1 && got_operand (operands[1], mode))
	{
	  emit_insn (gen_movsi_got (operands[0], operands[1]));
	  return;
	}

      /* No TOC and no PIC: materialize the address with an explicit
	 high/low pair.  */
      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
	  && TARGET_NO_TOC
	  && ! flag_pic
	  && mode == Pmode
	  && CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != HIGH
	  && GET_CODE (operands[1]) != CONST_INT)
	{
	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));

	  /* If this is a function address on -mcall-aixdesc,
	     convert it to the address of the descriptor.  */
	  if (DEFAULT_ABI == ABI_AIX
	      && GET_CODE (operands[1]) == SYMBOL_REF
	      && XSTR (operands[1], 0)[0] == '.')
	    {
	      const char *name = XSTR (operands[1], 0);
	      rtx new_ref;
	      while (*name == '.')
		name++;
	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
	      CONSTANT_POOL_ADDRESS_P (new_ref)
		= CONSTANT_POOL_ADDRESS_P (operands[1]);
	      SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
	      operands[1] = new_ref;
	    }

	  if (DEFAULT_ABI == ABI_DARWIN)
	    {
#if TARGET_MACHO
	      if (MACHO_DYNAMIC_NO_PIC_P)
		{
		  /* Take care of any required data indirection.  */
		  operands[1] = rs6000_machopic_legitimize_pic_address (
				  operands[1], mode, operands[0]);
		  if (operands[0] != operands[1])
		    emit_insn (gen_rtx_SET (VOIDmode,
					    operands[0], operands[1]));
		  return;
		}
#endif
	      emit_insn (gen_macho_high (target, operands[1]));
	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
	      return;
	    }

	  emit_insn (gen_elf_high (target, operands[1]));
	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
	  return;
	}

      /* If this is a SYMBOL_REF that refers to a constant pool entry,
	 and we have put it in the TOC, we just need to make a TOC-relative
	 reference to it.  */
      if (TARGET_TOC
	  && GET_CODE (operands[1]) == SYMBOL_REF
	  && CONSTANT_POOL_EXPR_P (operands[1])
	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
					      get_pool_mode (operands[1])))
	{
	  operands[1] = create_TOC_reference (operands[1]);
	}
      /* Other expensive Pmode constants go through the constant pool
	 (or the TOC) below.  */
      else if (mode == Pmode
	       && CONSTANT_P (operands[1])
	       && ((GET_CODE (operands[1]) != CONST_INT
		    && ! easy_fp_constant (operands[1], mode))
		   || (GET_CODE (operands[1]) == CONST_INT
		       && num_insns_constant (operands[1], mode) > 2)
		   || (GET_CODE (operands[0]) == REG
		       && FP_REGNO_P (REGNO (operands[0]))))
	       && GET_CODE (operands[1]) != HIGH
	       && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
	       && ! TOC_RELATIVE_EXPR_P (operands[1]))
	{
	  /* Emit a USE operation so that the constant isn't deleted if
	     expensive optimizations are turned on because nobody
	     references it.  This should only be done for operands that
	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
	     This should not be done for operands that contain LABEL_REFs.
	     For now, we just handle the obvious case.  */
	  if (GET_CODE (operands[1]) != LABEL_REF)
	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));

#if TARGET_MACHO
	  /* Darwin uses a special PIC legitimizer.  */
	  if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
	    {
	      operands[1] =
		rs6000_machopic_legitimize_pic_address (operands[1], mode,
							operands[0]);
	      if (operands[0] != operands[1])
		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	      return;
	    }
#endif

	  /* If we are to limit the number of things we put in the TOC and
	     this is a symbol plus a constant we can add in one insn,
	     just put the symbol in the TOC and add the constant.  Don't do
	     this if reload is in progress.  */
	  if (GET_CODE (operands[1]) == CONST
	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
	      && ! side_effects_p (operands[0]))
	    {
	      rtx sym =
		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
	      rtx other = XEXP (XEXP (operands[1], 0), 1);

	      sym = force_reg (mode, sym);
	      if (mode == SImode)
		emit_insn (gen_addsi3 (operands[0], sym, other));
	      else
		emit_insn (gen_adddi3 (operands[0], sym, other));
	      return;
	    }

	  operands[1] = force_const_mem (mode, operands[1]);

	  /* If the pooled constant landed in the TOC, reference it
	     TOC-relative and mark the MEM accordingly.  */
	  if (TARGET_TOC
	      && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
		get_pool_constant (XEXP (operands[1], 0)),
		get_pool_mode (XEXP (operands[1], 0))))
	    {
	      operands[1]
		= gen_rtx_MEM (mode,
			       create_TOC_reference (XEXP (operands[1], 0)));
	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
	      RTX_UNCHANGING_P (operands[1]) = 1;
	    }
	}
      break;

    case TImode:
      /* Force TImode memory addresses into a single register.  */
      if (GET_CODE (operands[0]) == MEM
	  && GET_CODE (XEXP (operands[0], 0)) != REG
	  && ! reload_in_progress)
	operands[0]
	  = replace_equiv_address (operands[0],
				   copy_addr_to_reg (XEXP (operands[0], 0)));

      if (GET_CODE (operands[1]) == MEM
	  && GET_CODE (XEXP (operands[1], 0)) != REG
	  && ! reload_in_progress)
	operands[1]
	  = replace_equiv_address (operands[1],
				   copy_addr_to_reg (XEXP (operands[1], 0)));
      if (TARGET_POWER)
	{
	  /* On POWER, emit the move with a clobbered SImode scratch.  */
	  emit_insn (gen_rtx_PARALLEL (VOIDmode,
				       gen_rtvec (2,
						  gen_rtx_SET (VOIDmode,
							       operands[0], operands[1]),
						  gen_rtx_CLOBBER (VOIDmode,
								   gen_rtx_SCRATCH (SImode)))));
	  return;
	}
      break;

    default:
      abort ();
    }

  /* Above, we may have called force_const_mem which may have returned
     an invalid address.  If we can, fix this up; otherwise, reload will
     have to deal with it.  */
  if (GET_CODE (operands[1]) == MEM
      && ! memory_address_p (mode, XEXP (operands[1], 0))
      && ! reload_in_progress)
    operands[1] = adjust_address (operands[1], mode, 0);

  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
  return;
}
3094 \f
3095 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3096 for a call to a function whose data type is FNTYPE.
3097 For a library call, FNTYPE is 0.
3098
3099 For incoming args we set the number of arguments in the prototype large
3100 so we never return a PARALLEL. */
3101
3102 void
3103 init_cumulative_args (cum, fntype, libname, incoming)
3104 CUMULATIVE_ARGS *cum;
3105 tree fntype;
3106 rtx libname ATTRIBUTE_UNUSED;
3107 int incoming;
3108 {
3109 static CUMULATIVE_ARGS zero_cumulative;
3110
3111 *cum = zero_cumulative;
3112 cum->words = 0;
3113 cum->fregno = FP_ARG_MIN_REG;
3114 cum->vregno = ALTIVEC_ARG_MIN_REG;
3115 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3116 cum->call_cookie = CALL_NORMAL;
3117 cum->sysv_gregno = GP_ARG_MIN_REG;
3118
3119 if (incoming)
3120 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
3121
3122 else if (cum->prototype)
3123 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
3124 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
3125 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
3126
3127 else
3128 cum->nargs_prototype = 0;
3129
3130 cum->orig_nargs = cum->nargs_prototype;
3131
3132 /* Check for a longcall attribute. */
3133 if (fntype
3134 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3135 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3136 cum->call_cookie = CALL_LONG;
3137
3138 if (TARGET_DEBUG_ARG)
3139 {
3140 fprintf (stderr, "\ninit_cumulative_args:");
3141 if (fntype)
3142 {
3143 tree ret_type = TREE_TYPE (fntype);
3144 fprintf (stderr, " ret code = %s,",
3145 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3146 }
3147
3148 if (cum->call_cookie & CALL_LONG)
3149 fprintf (stderr, " longcall,");
3150
3151 fprintf (stderr, " proto = %d, nargs = %d\n",
3152 cum->prototype, cum->nargs_prototype);
3153 }
3154 }
3155 \f
3156 /* If defined, a C expression which determines whether, and in which
3157 direction, to pad out an argument with extra space. The value
3158 should be of type `enum direction': either `upward' to pad above
3159 the argument, `downward' to pad below, or `none' to inhibit
3160 padding.
3161
3162 For the AIX ABI structs are always stored left shifted in their
3163 argument slot. */
3164
3165 enum direction
3166 function_arg_padding (mode, type)
3167 enum machine_mode mode;
3168 tree type;
3169 {
3170 if (type != 0 && AGGREGATE_TYPE_P (type))
3171 return upward;
3172
3173 /* This is the default definition. */
3174 return (! BYTES_BIG_ENDIAN
3175 ? upward
3176 : ((mode == BLKmode
3177 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3178 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
3179 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
3180 ? downward : upward));
3181 }
3182
3183 /* If defined, a C expression that gives the alignment boundary, in bits,
3184 of an argument with the specified mode and type. If it is not defined,
3185 PARM_BOUNDARY is used for all arguments.
3186
3187 V.4 wants long longs to be double word aligned. */
3188
3189 int
3190 function_arg_boundary (mode, type)
3191 enum machine_mode mode;
3192 tree type ATTRIBUTE_UNUSED;
3193 {
3194 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3195 return 64;
3196 else if (SPE_VECTOR_MODE (mode))
3197 return 64;
3198 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3199 return 128;
3200 else
3201 return PARM_BOUNDARY;
3202 }
3203 \f
3204 /* Update the data in CUM to advance over an argument
3205 of mode MODE and data type TYPE.
3206 (TYPE is null for libcalls where that information may not be available.) */
3207
void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  /* One more argument consumed; this may go negative once we are past
     the prototyped arguments.  */
  cum->nargs_prototype--;

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* AltiVec args take a vector register while registers remain and
	 the argument is still within the prototype; otherwise they are
	 accounted as stack words.  */
      if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
	cum->vregno++;
      else
	cum->words += RS6000_ARG_SIZE (mode, type);
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
	   && named && cum->sysv_gregno <= GP_ARG_MAX_REG)
    /* A named SPE vector consumes a single GPR slot.  */
    cum->sysv_gregno++;
  else if (DEFAULT_ABI == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    cum->fregno++;
	  else
	    {
	      /* FPRs exhausted: the value goes on the stack, and a
		 DFmode value is doubleword aligned there (pad word
		 when the word count is odd).  */
	      if (mode == DFmode)
		cum->words += cum->words & 1;
	      cum->words += RS6000_ARG_SIZE (mode, type);
	    }
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference. */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long and SPE vectors are put in odd registers. */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long and SPE vectors are not split between registers
	     and stack. */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    {
	      /* Long long is aligned on the stack. */
	      if (n_words == 2)
		cum->words += cum->words & 1;
	      cum->words += n_words;
	    }

	  /* Note: continuing to accumulate gregno past when we've started
	     spilling to the stack indicates the fact that we've started
	     spilling to the stack to expand_builtin_saveregs. */
	  cum->sysv_gregno = gregno + n_words;
	}

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
	  fprintf (stderr, "mode = %4s, named = %d\n",
		   GET_MODE_NAME (mode), named);
	}
    }
  else
    {
      /* Non-V.4 ABIs: everything is accounted in stack words.  On
	 32-bit targets a doubleword-aligned argument may need one
	 leading pad word.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;

      cum->words += align + RS6000_ARG_SIZE (mode, type);

      /* Floating args also consume FPRs; TFmode takes two.  */
      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  && TARGET_HARD_FLOAT && TARGET_FPRS)
	cum->fregno += (mode == TFmode ? 2 : 1);

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
	  fprintf (stderr, "named = %d, align = %d\n", named, align);
	}
    }
}
3304 \f
3305 /* Determine where to put an argument to a function.
3306 Value is zero to push the argument on the stack,
3307 or a hard register in which to store the argument.
3308
3309 MODE is the argument's machine mode.
3310 TYPE is the data type of the argument (as a tree).
3311 This is null for libcalls where that information may
3312 not be available.
3313 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3314 the preceding args and about the function being called.
3315 NAMED is nonzero if this argument is a named parameter
3316 (otherwise it is an extra parameter matching an ellipsis).
3317
3318 On RS/6000 the first eight words of non-FP are normally in registers
3319 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3320 Under V.4, the first 8 FP args are in registers.
3321
3322 If this is floating-point and no prototype is specified, we use
3323 both an FP and integer register (or possibly FP reg and stack). Library
3324 functions (when TYPE is zero) always have the proper types for args,
3325 so we can pass the FP value just in one register. emit_library_function
3326 doesn't support PARALLEL anyway. */
3327
struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
	  && cum->nargs_prototype < 0
	  && type && (cum->prototype || TARGET_NO_PROTOTYPE))
	{
	  /* For the SPE, we need to crxor CR6 always.  */
	  if (TARGET_SPE_ABI)
	    return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
	  else if (TARGET_HARD_FLOAT && TARGET_FPRS)
	    /* cum->fregno == FP_ARG_MIN_REG means no FPR was consumed,
	       i.e. no FP args were passed in registers.  */
	    return GEN_INT (cum->call_cookie
			    | ((cum->fregno == FP_ARG_MIN_REG)
			       ? CALL_V4_SET_FP_ARGS
			       : CALL_V4_CLEAR_FP_ARGS));
	}

      return GEN_INT (cum->call_cookie);
    }

  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      /* Named AltiVec args go in vector registers while any remain;
	 otherwise (or if unnamed) they are passed in memory.  */
      if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->vregno);
      else
	return NULL;
    }
  else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) && named)
    {
      /* Named SPE vectors go in a GPR if one is free.  */
      if (cum->sysv_gregno <= GP_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->sysv_gregno);
      else
	return NULL;
    }
  else if (abi == ABI_V4)
    {
      if (TARGET_HARD_FLOAT && TARGET_FPRS
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    return gen_rtx_REG (mode, cum->fregno);
	  else
	    return NULL;
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference. */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long and SPE vectors are put in odd registers.
	     (This must mirror the same adjustment made in
	     function_arg_advance.)  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long and SPE vectors are not split between registers
	     and stack. */
	  if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
	    {
	      /* SPE vectors in ... get split into 2 registers. */
	      if (TARGET_SPE && TARGET_SPE_ABI
		  && SPE_VECTOR_MODE (mode) && !named)
		{
		  /* An unnamed (variadic) SPE vector is described as a
		     PARALLEL of two SImode halves at byte offsets 0
		     and 4 of the argument.  */
		  rtx r1, r2;
		  enum machine_mode m = SImode;

		  r1 = gen_rtx_REG (m, gregno);
		  r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
		  r2 = gen_rtx_REG (m, gregno + 1);
		  r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
		  return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
		}
	      return gen_rtx_REG (mode, gregno);
	    }
	  else
	    return NULL;
	}
    }
  else
    {
      /* On 32-bit targets a doubleword-aligned argument may skip one
	 pad word before its register/stack slot.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;
      int align_words = cum->words + align;

      /* Variable-sized types are passed by reference (see
	 function_arg_pass_by_reference); no register here.  */
      if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return NULL_RTX;

      if (USE_FP_FOR_ARG_P (*cum, mode, type))
	{
	  if (! type
	      || ((cum->nargs_prototype > 0)
	          /* IBM AIX extended its linkage convention definition always
		     to require FP args after register save area hole on the
		     stack.  */
	          && (DEFAULT_ABI != ABI_AIX
		      || ! TARGET_XL_CALL
		      || (align_words < GP_ARG_NUM_REG))))
	    return gen_rtx_REG (mode, cum->fregno);

	  /* Unprototyped FP arg: describe it in both an FPR and the
	     corresponding GPR/stack location so the caller can satisfy
	     either convention.  */
          return gen_rtx_PARALLEL (mode,
	    gen_rtvec (2,
		       gen_rtx_EXPR_LIST (VOIDmode,
				((align_words >= GP_ARG_NUM_REG)
				 ? NULL_RTX
				 : (align_words
				    + RS6000_ARG_SIZE (mode, type)
				    > GP_ARG_NUM_REG
				    /* If this is partially on the stack, then
				       we only include the portion actually
				       in registers here. */
				    ? gen_rtx_REG (SImode,
					       GP_ARG_MIN_REG + align_words)
				    : gen_rtx_REG (mode,
					       GP_ARG_MIN_REG + align_words))),
				const0_rtx),
		       gen_rtx_EXPR_LIST (VOIDmode,
				gen_rtx_REG (mode, cum->fregno),
				const0_rtx)));
	}
      else if (align_words < GP_ARG_NUM_REG)
	return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
      else
	return NULL_RTX;
    }
}
3470 \f
3471 /* For an arg passed partly in registers and partly in memory,
3472 this is the number of registers used.
3473 For args passed entirely in registers or entirely in memory, zero. */
3474
3475 int
3476 function_arg_partial_nregs (cum, mode, type, named)
3477 CUMULATIVE_ARGS *cum;
3478 enum machine_mode mode;
3479 tree type;
3480 int named ATTRIBUTE_UNUSED;
3481 {
3482 if (DEFAULT_ABI == ABI_V4)
3483 return 0;
3484
3485 if (USE_FP_FOR_ARG_P (*cum, mode, type)
3486 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
3487 {
3488 if (cum->nargs_prototype >= 0)
3489 return 0;
3490 }
3491
3492 if (cum->words < GP_ARG_NUM_REG
3493 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
3494 {
3495 int ret = GP_ARG_NUM_REG - cum->words;
3496 if (ret && TARGET_DEBUG_ARG)
3497 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
3498
3499 return ret;
3500 }
3501
3502 return 0;
3503 }
3504 \f
3505 /* A C expression that indicates when an argument must be passed by
3506 reference. If nonzero for an argument, a copy of that argument is
3507 made in memory and a pointer to the argument is passed instead of
3508 the argument itself. The pointer is passed in whatever way is
3509 appropriate for passing a pointer to that type.
3510
3511 Under V.4, structures and unions are passed by reference.
3512
3513 As an extension to all ABIs, variable sized types are passed by
3514 reference. */
3515
3516 int
3517 function_arg_pass_by_reference (cum, mode, type, named)
3518 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
3519 enum machine_mode mode ATTRIBUTE_UNUSED;
3520 tree type;
3521 int named ATTRIBUTE_UNUSED;
3522 {
3523 if (DEFAULT_ABI == ABI_V4
3524 && ((type && AGGREGATE_TYPE_P (type))
3525 || mode == TFmode))
3526 {
3527 if (TARGET_DEBUG_ARG)
3528 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
3529
3530 return 1;
3531 }
3532 return type && int_size_in_bytes (type) <= 0;
3533 }
3534 \f
3535 /* Perform any needed actions needed for a function that is receiving a
3536 variable number of arguments.
3537
3538 CUM is as above.
3539
3540 MODE and TYPE are the mode and type of the current parameter.
3541
3542 PRETEND_SIZE is a variable that should be set to the amount of stack
3543 that must be pushed by the prolog to pretend that our caller pushed
3544 it.
3545
3546 Normally, this macro will push all remaining incoming registers on the
3547 stack and set PRETEND_SIZE to the length of the registers pushed. */
3548
3549 void
3550 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
3551 CUMULATIVE_ARGS *cum;
3552 enum machine_mode mode;
3553 tree type;
3554 int *pretend_size ATTRIBUTE_UNUSED;
3555 int no_rtl;
3556
3557 {
3558 CUMULATIVE_ARGS next_cum;
3559 int reg_size = TARGET_32BIT ? 4 : 8;
3560 rtx save_area = NULL_RTX, mem;
3561 int first_reg_offset, set;
3562 tree fntype;
3563 int stdarg_p;
3564
3565 fntype = TREE_TYPE (current_function_decl);
3566 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
3567 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3568 != void_type_node));
3569
3570 /* For varargs, we do not want to skip the dummy va_dcl argument.
3571 For stdargs, we do want to skip the last named argument. */
3572 next_cum = *cum;
3573 if (stdarg_p)
3574 function_arg_advance (&next_cum, mode, type, 1);
3575
3576 if (DEFAULT_ABI == ABI_V4)
3577 {
3578 /* Indicate to allocate space on the stack for varargs save area. */
3579 cfun->machine->sysv_varargs_p = 1;
3580 if (! no_rtl)
3581 save_area = plus_constant (virtual_stack_vars_rtx,
3582 - RS6000_VARARGS_SIZE);
3583
3584 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
3585 }
3586 else
3587 {
3588 first_reg_offset = next_cum.words;
3589 save_area = virtual_incoming_args_rtx;
3590 cfun->machine->sysv_varargs_p = 0;
3591
3592 if (MUST_PASS_IN_STACK (mode, type))
3593 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
3594 }
3595
3596 set = get_varargs_alias_set ();
3597 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
3598 {
3599 mem = gen_rtx_MEM (BLKmode,
3600 plus_constant (save_area,
3601 first_reg_offset * reg_size)),
3602 set_mem_alias_set (mem, set);
3603 set_mem_align (mem, BITS_PER_WORD);
3604
3605 move_block_from_reg
3606 (GP_ARG_MIN_REG + first_reg_offset, mem,
3607 GP_ARG_NUM_REG - first_reg_offset,
3608 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3609 }
3610
3611 /* Save FP registers if needed. */
3612 if (DEFAULT_ABI == ABI_V4
3613 && TARGET_HARD_FLOAT && TARGET_FPRS
3614 && ! no_rtl
3615 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3616 {
3617 int fregno = next_cum.fregno;
3618 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3619 rtx lab = gen_label_rtx ();
3620 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
3621
3622 emit_jump_insn (gen_rtx_SET (VOIDmode,
3623 pc_rtx,
3624 gen_rtx_IF_THEN_ELSE (VOIDmode,
3625 gen_rtx_NE (VOIDmode, cr1,
3626 const0_rtx),
3627 gen_rtx_LABEL_REF (VOIDmode, lab),
3628 pc_rtx)));
3629
3630 while (fregno <= FP_ARG_V4_MAX_REG)
3631 {
3632 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3633 set_mem_alias_set (mem, set);
3634 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3635 fregno++;
3636 off += 8;
3637 }
3638
3639 emit_label (lab);
3640 }
3641 }
3642
3643 /* Create the va_list data type. */
3644
3645 tree
3646 rs6000_build_va_list ()
3647 {
3648 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3649
3650 /* For AIX, prefer 'char *' because that's what the system
3651 header files like. */
3652 if (DEFAULT_ABI != ABI_V4)
3653 return build_pointer_type (char_type_node);
3654
3655 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3656 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3657
3658 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3659 unsigned_char_type_node);
3660 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3661 unsigned_char_type_node);
3662 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3663 ptr_type_node);
3664 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3665 ptr_type_node);
3666
3667 DECL_FIELD_CONTEXT (f_gpr) = record;
3668 DECL_FIELD_CONTEXT (f_fpr) = record;
3669 DECL_FIELD_CONTEXT (f_ovf) = record;
3670 DECL_FIELD_CONTEXT (f_sav) = record;
3671
3672 TREE_CHAIN (record) = type_decl;
3673 TYPE_NAME (record) = type_decl;
3674 TYPE_FIELDS (record) = f_gpr;
3675 TREE_CHAIN (f_gpr) = f_fpr;
3676 TREE_CHAIN (f_fpr) = f_ovf;
3677 TREE_CHAIN (f_ovf) = f_sav;
3678
3679 layout_type (record);
3680
3681 /* The correct type is an array type of one element. */
3682 return build_array_type (record, build_index_type (size_zero_node));
3683 }
3684
3685 /* Implement va_start. */
3686
void
rs6000_va_start (valist, nextarg)
     tree valist;
     rtx nextarg;
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (valist, nextarg);
      return;
    }

  /* The four fields of the record built by rs6000_build_va_list, in
     declaration order: gpr, fpr, overflow_arg_area, reg_save_area.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* va_list is a one-element array, so dereference to reach the
     record before building the COMPONENT_REFs.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used. */
  words = current_function_args_info.words;
  n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
  n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;

  if (TARGET_DEBUG_ARG)
    {
      fputs ("va_start: words = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
      fputs (", n_gpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
      fputs (", n_fpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
      putc ('\n', stderr);
    }

  /* Record how many GPRs/FPRs the named arguments consumed.  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area: the incoming-args pointer, advanced past
     any named arguments already there.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
	       build_int_2 (words * UNITS_PER_WORD, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area, RS6000_VARARGS_SIZE below the frame
     pointer (matching the allocation in setup_incoming_varargs).  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-RS6000_VARARGS_SIZE, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3755
3756 /* Implement va_arg. */
3757
rtx
rs6000_va_arg (valist, type)
     tree valist, type;
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;

  if (DEFAULT_ABI != ABI_V4)
    {
      /* Variable sized types are passed by reference.  */
      if (int_size_in_bytes (type) <= 0)
	{
	  u = build_pointer_type (type);

	  /* Args grow upward.  */
	  t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
		     build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
	  TREE_SIDE_EFFECTS (t) = 1;

	  t = build1 (NOP_EXPR, build_pointer_type (u), t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  /* Double indirection: the slot holds a pointer to the
	     actual object.  */
	  t = build1 (INDIRECT_REF, u, t);
	  TREE_SIDE_EFFECTS (t) = 1;

	  return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
	}
      else
	return std_expand_builtin_va_arg (valist, type);
    }

  /* V.4: fields of the va_list record, in declaration order.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Decide which register counter (reg), save-area offset/scale and
     slot size apply to this type.  */
  if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
    {
      /* Aggregates and long doubles are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      size = UNITS_PER_WORD;
      rsize = 1;
    }
  else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
    {
      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 8*4;		/* FP save area follows the 8 GPR slots.  */
      sav_scale = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
    }

  /* Pull the value out of the saved registers ...  */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* AltiVec vectors never go in registers.  */
  if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
    {
      TREE_THIS_VOLATILE (reg) = 1;
      /* Jump to the overflow path when fewer than n_reg register slots
	 remain (the register counters saturate at 8).  */
      emit_cmp_and_jump_insns
	(expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
	 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
	 lab_false);

      /* Long long is aligned in the registers.  */
      if (n_reg > 1)
	{
	  /* Round the register counter up to a multiple of n_reg.  */
	  u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		     build_int_2 (n_reg - 1, 0));
	  u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
	  u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
	  TREE_SIDE_EFFECTS (u) = 1;
	  expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
	}

      if (sav_ofs)
	t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
      else
	t = sav;

      /* Post-increment the register counter by n_reg, and address the
	 slot as save_area + sav_ofs + old_count * sav_scale.  */
      u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      u = build1 (CONVERT_EXPR, integer_type_node, u);
      TREE_SIDE_EFFECTS (u) = 1;

      u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
      TREE_SIDE_EFFECTS (u) = 1;

      t = build (PLUS_EXPR, ptr_type_node, t, u);
      TREE_SIDE_EFFECTS (t) = 1;

      r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
      if (r != addr_rtx)
	emit_move_insn (addr_rtx, r);

      emit_jump_insn (gen_jump (lab_over));
      emit_barrier ();
    }

  emit_label (lab_false);

  /* ... otherwise out of the overflow area.  */

  /* Make sure we don't find reg 7 for the next int arg.

     All AltiVec vectors go in the overflow area.  So in the AltiVec
     case we need to get the vectors from the overflow area, but
     remember where the GPRs and FPRs are.  */
  if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
		    || !TARGET_ALTIVEC))
    {
      t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Care for on-stack alignment if needed.  */
  if (rsize <= 1)
    t = ovf;
  else
    {
      int align;

      /* AltiVec vectors are 16 byte aligned.  */
      if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
	align = 15;
      else
	align = 7;

      /* Round ovf up: (ovf + align) & ~align.  */
      t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
    }
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Advance ovf past the consumed bytes.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  if (indirect_p)
    {
      /* For by-reference types the slot holds a pointer to the object;
	 load it so addr_rtx points at the object itself.  */
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
3942
3943 /* Builtins. */
3944
/* Register the machine-specific builtin NAME, of type TYPE and with
   identifying code CODE, but only when the target flag bits in MASK
   are currently enabled.  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
do {								\
  if ((MASK) & target_flags)					\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
		      NULL, NULL_TREE);				\
} while (0)
3951
3952 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3953
/* Each entry: the target-flag mask required for the builtin to exist,
   the insn code implementing it, its user-visible name, and its
   builtin code.  */
static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
3980
3981 /* DST operations: void foo (void *, const int, const char). */
3982
/* Same entry layout as bdesc_3arg: required target-flag mask, insn
   code, user-visible name, builtin code.  */
static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
3990
/* Simple binary operations: VECc = foo (VECa, VECb).

   Each entry gives the target mask that must be enabled for the
   builtin to be available (0 for the SPE entries), the insn pattern
   that implements it, the user-visible builtin name, and its
   rs6000_builtins enumerator.  The SPE entries form a contiguous
   range delimited by the place-holder entries below, so their
   ordering must be preserved.  */

static struct builtin_description bdesc_2arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
  { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
  { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
  { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
  { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
  { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
  { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
  { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
  { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
  { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
  { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
  { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
  { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
  { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
  { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
  { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
  { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
  { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
  { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
  { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
  { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
  { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
  { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
  { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
  { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
  { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
  { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },

  /* Place-holder.  Leave as first SPE builtin; the SPE entries from
     here through the final evxor entry are treated as one range.  */
  { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
  { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
  { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
  { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
  { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
  { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
  { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
  { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
  { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
  { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
  { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
  { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
  { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
  { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
  { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
  { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
  { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
  { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
  { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
  { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
  { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
  { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
  { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
  { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
  { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
  { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
  { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
  { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
  { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
  { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
  { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
  { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
  { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
  { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
  { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
  { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
  { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
  { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
  { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
  { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
  { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
  { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
  { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
  { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
  { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
  { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
  { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
  { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
  { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
  { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
  { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
  { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
  { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
  { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
  { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
  { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
  { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
  { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
  { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
  { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
  { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
  { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
  { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
  { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
  { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
  { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
  { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
  { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
  { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
  { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
  { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
  { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
  { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
  { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
  { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
  { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
  { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
  { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
  { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
  { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
  { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
  { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
  { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
  { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
  { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
  { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
  { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
  { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
  { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
  { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
  { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
  { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
  { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
  { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
  { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
  { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
  { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
  { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
  { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
  { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
  { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
  { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
  { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
  { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
  { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
  { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
  { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
  { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
  { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },

  /* SPE binary operations expecting a 5-bit unsigned literal.  */
  { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },

  { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
  { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
  { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
  { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
  { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
  { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
  { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
  { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
  { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
  { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
  { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
  { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
  { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
  { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
  { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
  { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
  { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
  { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
  { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
  { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
  { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
  { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
  { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
  { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
  { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
  { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },

  /* Place-holder.  Leave as last binary SPE builtin.  */
  { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
};
4253
/* AltiVec predicates.  */

/* Like builtin_description, but additionally records an opcode string
   (e.g. "*vcmpbfp." in bdesc_altivec_preds below) alongside the insn
   code, since several predicates share one predicate insn pattern per
   vector mode.  */
struct builtin_description_predicates
{
  /* Target flag bits (MASK_*) required for this builtin.  */
  const unsigned int mask;
  /* Insn pattern used to expand the predicate.  */
  const enum insn_code icode;
  /* Comparison opcode string distinguishing predicates that share
     an insn pattern.  */
  const char *opcode;
  /* User-visible builtin name.  */
  const char *const name;
  /* Enumerator identifying this builtin.  */
  const enum rs6000_builtins code;
};
4264
/* AltiVec comparison predicates (the "dot" forms that also set CR6).
   One predicate insn pattern exists per vector mode; the opcode string
   selects the actual comparison within that pattern.  */
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
4281
/* SPE predicates.  The expander treats these as a contiguous range
   delimited by the first and last entries, so keep the ordering.  */
static struct builtin_description bdesc_spe_predicates[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
};
4299
/* SPE evsel predicates: each entry pairs a comparison insn with an
   evsel-style builtin.  Also a contiguous range delimited by the first
   and last entries; keep the ordering.  */
static struct builtin_description bdesc_spe_evsel[] =
{
  /* Place-holder.  Leave as first.  */
  { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
  { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
  { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
  { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
  { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
  { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
  { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
  { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
  { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
  { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
  /* Place-holder.  Leave as last.  */
  { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
};
4317
/* ABS* operations: plain absolute value (abs) and saturating absolute
   value (abss) for each AltiVec element mode.  */

static const struct builtin_description bdesc_abs[] =
{
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
4330
/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
   foo (VECa).  The vspltis* and evsplat* entries take a 5-bit signed
   literal rather than a vector operand; the expander special-cases
   them.  */

static struct builtin_description bdesc_1arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },

  /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
     end with SPE_BUILTIN_EVSUBFUSIAAW.  */
  { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
  { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
  { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
  { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
  { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
  { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
  { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
  { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
  { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
  { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
  { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
  { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
  { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
  { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
  { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
  { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
  { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
  { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
  { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
  { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
  { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
  { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
  { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
  { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
  { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
  { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
  { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
  { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
  { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
  { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },

  /* Place-holder.  Leave as last unary SPE builtin.  */
  { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
};
4390
4391 static rtx
4392 rs6000_expand_unop_builtin (icode, arglist, target)
4393 enum insn_code icode;
4394 tree arglist;
4395 rtx target;
4396 {
4397 rtx pat;
4398 tree arg0 = TREE_VALUE (arglist);
4399 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4400 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4401 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4402
4403 if (icode == CODE_FOR_nothing)
4404 /* Builtin not supported on this processor. */
4405 return 0;
4406
4407 /* If we got invalid arguments bail out before generating bad rtl. */
4408 if (arg0 == error_mark_node)
4409 return const0_rtx;
4410
4411 if (icode == CODE_FOR_altivec_vspltisb
4412 || icode == CODE_FOR_altivec_vspltish
4413 || icode == CODE_FOR_altivec_vspltisw
4414 || icode == CODE_FOR_spe_evsplatfi
4415 || icode == CODE_FOR_spe_evsplati)
4416 {
4417 /* Only allow 5-bit *signed* literals. */
4418 if (GET_CODE (op0) != CONST_INT
4419 || INTVAL (op0) > 0x1f
4420 || INTVAL (op0) < -0x1f)
4421 {
4422 error ("argument 1 must be a 5-bit signed literal");
4423 return const0_rtx;
4424 }
4425 }
4426
4427 if (target == 0
4428 || GET_MODE (target) != tmode
4429 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4430 target = gen_reg_rtx (tmode);
4431
4432 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4433 op0 = copy_to_mode_reg (mode0, op0);
4434
4435 pat = GEN_FCN (icode) (target, op0);
4436 if (! pat)
4437 return 0;
4438 emit_insn (pat);
4439
4440 return target;
4441 }
4442
4443 static rtx
4444 altivec_expand_abs_builtin (icode, arglist, target)
4445 enum insn_code icode;
4446 tree arglist;
4447 rtx target;
4448 {
4449 rtx pat, scratch1, scratch2;
4450 tree arg0 = TREE_VALUE (arglist);
4451 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4452 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4453 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4454
4455 /* If we have invalid arguments, bail out before generating bad rtl. */
4456 if (arg0 == error_mark_node)
4457 return const0_rtx;
4458
4459 if (target == 0
4460 || GET_MODE (target) != tmode
4461 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4462 target = gen_reg_rtx (tmode);
4463
4464 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4465 op0 = copy_to_mode_reg (mode0, op0);
4466
4467 scratch1 = gen_reg_rtx (mode0);
4468 scratch2 = gen_reg_rtx (mode0);
4469
4470 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4471 if (! pat)
4472 return 0;
4473 emit_insn (pat);
4474
4475 return target;
4476 }
4477
4478 static rtx
4479 rs6000_expand_binop_builtin (icode, arglist, target)
4480 enum insn_code icode;
4481 tree arglist;
4482 rtx target;
4483 {
4484 rtx pat;
4485 tree arg0 = TREE_VALUE (arglist);
4486 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4487 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4488 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4489 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4490 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4491 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4492
4493 if (icode == CODE_FOR_nothing)
4494 /* Builtin not supported on this processor. */
4495 return 0;
4496
4497 /* If we got invalid arguments bail out before generating bad rtl. */
4498 if (arg0 == error_mark_node || arg1 == error_mark_node)
4499 return const0_rtx;
4500
4501 if (icode == CODE_FOR_altivec_vcfux
4502 || icode == CODE_FOR_altivec_vcfsx
4503 || icode == CODE_FOR_altivec_vctsxs
4504 || icode == CODE_FOR_altivec_vctuxs
4505 || icode == CODE_FOR_altivec_vspltb
4506 || icode == CODE_FOR_altivec_vsplth
4507 || icode == CODE_FOR_altivec_vspltw
4508 || icode == CODE_FOR_spe_evaddiw
4509 || icode == CODE_FOR_spe_evldd
4510 || icode == CODE_FOR_spe_evldh
4511 || icode == CODE_FOR_spe_evldw
4512 || icode == CODE_FOR_spe_evlhhesplat
4513 || icode == CODE_FOR_spe_evlhhossplat
4514 || icode == CODE_FOR_spe_evlhhousplat
4515 || icode == CODE_FOR_spe_evlwhe
4516 || icode == CODE_FOR_spe_evlwhos
4517 || icode == CODE_FOR_spe_evlwhou
4518 || icode == CODE_FOR_spe_evlwhsplat
4519 || icode == CODE_FOR_spe_evlwwsplat
4520 || icode == CODE_FOR_spe_evrlwi
4521 || icode == CODE_FOR_spe_evslwi
4522 || icode == CODE_FOR_spe_evsrwis
4523 || icode == CODE_FOR_spe_evsrwiu)
4524 {
4525 /* Only allow 5-bit unsigned literals. */
4526 if (TREE_CODE (arg1) != INTEGER_CST
4527 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4528 {
4529 error ("argument 2 must be a 5-bit unsigned literal");
4530 return const0_rtx;
4531 }
4532 }
4533
4534 if (target == 0
4535 || GET_MODE (target) != tmode
4536 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4537 target = gen_reg_rtx (tmode);
4538
4539 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4540 op0 = copy_to_mode_reg (mode0, op0);
4541 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4542 op1 = copy_to_mode_reg (mode1, op1);
4543
4544 pat = GEN_FCN (icode) (target, op0, op1);
4545 if (! pat)
4546 return 0;
4547 emit_insn (pat);
4548
4549 return target;
4550 }
4551
4552 static rtx
4553 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
4554 enum insn_code icode;
4555 const char *opcode;
4556 tree arglist;
4557 rtx target;
4558 {
4559 rtx pat, scratch;
4560 tree cr6_form = TREE_VALUE (arglist);
4561 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
4562 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4563 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4564 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4565 enum machine_mode tmode = SImode;
4566 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4567 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4568 int cr6_form_int;
4569
4570 if (TREE_CODE (cr6_form) != INTEGER_CST)
4571 {
4572 error ("argument 1 of __builtin_altivec_predicate must be a constant");
4573 return const0_rtx;
4574 }
4575 else
4576 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
4577
4578 if (mode0 != mode1)
4579 abort ();
4580
4581 /* If we have invalid arguments, bail out before generating bad rtl. */
4582 if (arg0 == error_mark_node || arg1 == error_mark_node)
4583 return const0_rtx;
4584
4585 if (target == 0
4586 || GET_MODE (target) != tmode
4587 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4588 target = gen_reg_rtx (tmode);
4589
4590 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4591 op0 = copy_to_mode_reg (mode0, op0);
4592 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4593 op1 = copy_to_mode_reg (mode1, op1);
4594
4595 scratch = gen_reg_rtx (mode0);
4596
4597 pat = GEN_FCN (icode) (scratch, op0, op1,
4598 gen_rtx (SYMBOL_REF, Pmode, opcode));
4599 if (! pat)
4600 return 0;
4601 emit_insn (pat);
4602
4603 /* The vec_any* and vec_all* predicates use the same opcodes for two
4604 different operations, but the bits in CR6 will be different
4605 depending on what information we want. So we have to play tricks
4606 with CR6 to get the right bits out.
4607
4608 If you think this is disgusting, look at the specs for the
4609 AltiVec predicates. */
4610
4611 switch (cr6_form_int)
4612 {
4613 case 0:
4614 emit_insn (gen_cr6_test_for_zero (target));
4615 break;
4616 case 1:
4617 emit_insn (gen_cr6_test_for_zero_reverse (target));
4618 break;
4619 case 2:
4620 emit_insn (gen_cr6_test_for_lt (target));
4621 break;
4622 case 3:
4623 emit_insn (gen_cr6_test_for_lt_reverse (target));
4624 break;
4625 default:
4626 error ("argument 1 of __builtin_altivec_predicate is out of range");
4627 break;
4628 }
4629
4630 return target;
4631 }
4632
4633 static rtx
4634 altivec_expand_stv_builtin (icode, arglist)
4635 enum insn_code icode;
4636 tree arglist;
4637 {
4638 tree arg0 = TREE_VALUE (arglist);
4639 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4640 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4641 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4642 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4643 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4644 rtx pat;
4645 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
4646 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
4647 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
4648
4649 /* Invalid arguments. Bail before doing anything stoopid! */
4650 if (arg0 == error_mark_node
4651 || arg1 == error_mark_node
4652 || arg2 == error_mark_node)
4653 return const0_rtx;
4654
4655 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
4656 op0 = copy_to_mode_reg (mode2, op0);
4657 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
4658 op1 = copy_to_mode_reg (mode0, op1);
4659 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
4660 op2 = copy_to_mode_reg (mode1, op2);
4661
4662 pat = GEN_FCN (icode) (op1, op2, op0);
4663 if (pat)
4664 emit_insn (pat);
4665 return NULL_RTX;
4666 }
4667
4668 static rtx
4669 rs6000_expand_ternop_builtin (icode, arglist, target)
4670 enum insn_code icode;
4671 tree arglist;
4672 rtx target;
4673 {
4674 rtx pat;
4675 tree arg0 = TREE_VALUE (arglist);
4676 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4677 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4678 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4679 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4680 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4681 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4682 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4683 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4684 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
4685
4686 if (icode == CODE_FOR_nothing)
4687 /* Builtin not supported on this processor. */
4688 return 0;
4689
4690 /* If we got invalid arguments bail out before generating bad rtl. */
4691 if (arg0 == error_mark_node
4692 || arg1 == error_mark_node
4693 || arg2 == error_mark_node)
4694 return const0_rtx;
4695
4696 if (icode == CODE_FOR_altivec_vsldoi_4sf
4697 || icode == CODE_FOR_altivec_vsldoi_4si
4698 || icode == CODE_FOR_altivec_vsldoi_8hi
4699 || icode == CODE_FOR_altivec_vsldoi_16qi)
4700 {
4701 /* Only allow 4-bit unsigned literals. */
4702 if (TREE_CODE (arg2) != INTEGER_CST
4703 || TREE_INT_CST_LOW (arg2) & ~0xf)
4704 {
4705 error ("argument 3 must be a 4-bit unsigned literal");
4706 return const0_rtx;
4707 }
4708 }
4709
4710 if (target == 0
4711 || GET_MODE (target) != tmode
4712 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4713 target = gen_reg_rtx (tmode);
4714
4715 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4716 op0 = copy_to_mode_reg (mode0, op0);
4717 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4718 op1 = copy_to_mode_reg (mode1, op1);
4719 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
4720 op2 = copy_to_mode_reg (mode2, op2);
4721
4722 pat = GEN_FCN (icode) (target, op0, op1, op2);
4723 if (! pat)
4724 return 0;
4725 emit_insn (pat);
4726
4727 return target;
4728 }
4729
4730 /* Expand the lvx builtins. */
4731 static rtx
4732 altivec_expand_ld_builtin (exp, target, expandedp)
4733 tree exp;
4734 rtx target;
4735 bool *expandedp;
4736 {
4737 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4738 tree arglist = TREE_OPERAND (exp, 1);
4739 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4740 tree arg0;
4741 enum machine_mode tmode, mode0;
4742 rtx pat, op0;
4743 enum insn_code icode;
4744
4745 switch (fcode)
4746 {
4747 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
4748 icode = CODE_FOR_altivec_lvx_16qi;
4749 break;
4750 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
4751 icode = CODE_FOR_altivec_lvx_8hi;
4752 break;
4753 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
4754 icode = CODE_FOR_altivec_lvx_4si;
4755 break;
4756 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
4757 icode = CODE_FOR_altivec_lvx_4sf;
4758 break;
4759 default:
4760 *expandedp = false;
4761 return NULL_RTX;
4762 }
4763
4764 *expandedp = true;
4765
4766 arg0 = TREE_VALUE (arglist);
4767 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4768 tmode = insn_data[icode].operand[0].mode;
4769 mode0 = insn_data[icode].operand[1].mode;
4770
4771 if (target == 0
4772 || GET_MODE (target) != tmode
4773 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4774 target = gen_reg_rtx (tmode);
4775
4776 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4777 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4778
4779 pat = GEN_FCN (icode) (target, op0);
4780 if (! pat)
4781 return 0;
4782 emit_insn (pat);
4783 return target;
4784 }
4785
4786 /* Expand the stvx builtins. */
4787 static rtx
4788 altivec_expand_st_builtin (exp, target, expandedp)
4789 tree exp;
4790 rtx target ATTRIBUTE_UNUSED;
4791 bool *expandedp;
4792 {
4793 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4794 tree arglist = TREE_OPERAND (exp, 1);
4795 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4796 tree arg0, arg1;
4797 enum machine_mode mode0, mode1;
4798 rtx pat, op0, op1;
4799 enum insn_code icode;
4800
4801 switch (fcode)
4802 {
4803 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
4804 icode = CODE_FOR_altivec_stvx_16qi;
4805 break;
4806 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
4807 icode = CODE_FOR_altivec_stvx_8hi;
4808 break;
4809 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
4810 icode = CODE_FOR_altivec_stvx_4si;
4811 break;
4812 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
4813 icode = CODE_FOR_altivec_stvx_4sf;
4814 break;
4815 default:
4816 *expandedp = false;
4817 return NULL_RTX;
4818 }
4819
4820 arg0 = TREE_VALUE (arglist);
4821 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4822 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4823 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4824 mode0 = insn_data[icode].operand[0].mode;
4825 mode1 = insn_data[icode].operand[1].mode;
4826
4827 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4828 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4829 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4830 op1 = copy_to_mode_reg (mode1, op1);
4831
4832 pat = GEN_FCN (icode) (op0, op1);
4833 if (pat)
4834 emit_insn (pat);
4835
4836 *expandedp = true;
4837 return NULL_RTX;
4838 }
4839
4840 /* Expand the dst builtins. */
4841 static rtx
4842 altivec_expand_dst_builtin (exp, target, expandedp)
4843 tree exp;
4844 rtx target ATTRIBUTE_UNUSED;
4845 bool *expandedp;
4846 {
4847 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4848 tree arglist = TREE_OPERAND (exp, 1);
4849 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4850 tree arg0, arg1, arg2;
4851 enum machine_mode mode0, mode1, mode2;
4852 rtx pat, op0, op1, op2;
4853 struct builtin_description *d;
4854 size_t i;
4855
4856 *expandedp = false;
4857
4858 /* Handle DST variants. */
4859 d = (struct builtin_description *) bdesc_dst;
4860 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4861 if (d->code == fcode)
4862 {
4863 arg0 = TREE_VALUE (arglist);
4864 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4865 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4866 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4867 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4868 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4869 mode0 = insn_data[d->icode].operand[0].mode;
4870 mode1 = insn_data[d->icode].operand[1].mode;
4871 mode2 = insn_data[d->icode].operand[2].mode;
4872
4873 /* Invalid arguments, bail out before generating bad rtl. */
4874 if (arg0 == error_mark_node
4875 || arg1 == error_mark_node
4876 || arg2 == error_mark_node)
4877 return const0_rtx;
4878
4879 if (TREE_CODE (arg2) != INTEGER_CST
4880 || TREE_INT_CST_LOW (arg2) & ~0x3)
4881 {
4882 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
4883 return const0_rtx;
4884 }
4885
4886 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4887 op0 = copy_to_mode_reg (mode0, op0);
4888 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4889 op1 = copy_to_mode_reg (mode1, op1);
4890
4891 pat = GEN_FCN (d->icode) (op0, op1, op2);
4892 if (pat != 0)
4893 emit_insn (pat);
4894
4895 *expandedp = true;
4896 return NULL_RTX;
4897 }
4898
4899 return NULL_RTX;
4900 }
4901
4902 /* Expand the builtin in EXP and store the result in TARGET. Store
4903 true in *EXPANDEDP if we found a builtin to expand. */
4904 static rtx
4905 altivec_expand_builtin (exp, target, expandedp)
4906 tree exp;
4907 rtx target;
4908 bool *expandedp;
4909 {
4910 struct builtin_description *d;
4911 struct builtin_description_predicates *dp;
4912 size_t i;
4913 enum insn_code icode;
4914 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4915 tree arglist = TREE_OPERAND (exp, 1);
4916 tree arg0;
4917 rtx op0, pat;
4918 enum machine_mode tmode, mode0;
4919 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4920
4921 target = altivec_expand_ld_builtin (exp, target, expandedp);
4922 if (*expandedp)
4923 return target;
4924
4925 target = altivec_expand_st_builtin (exp, target, expandedp);
4926 if (*expandedp)
4927 return target;
4928
4929 target = altivec_expand_dst_builtin (exp, target, expandedp);
4930 if (*expandedp)
4931 return target;
4932
4933 *expandedp = true;
4934
4935 switch (fcode)
4936 {
4937 case ALTIVEC_BUILTIN_STVX:
4938 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
4939 case ALTIVEC_BUILTIN_STVEBX:
4940 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
4941 case ALTIVEC_BUILTIN_STVEHX:
4942 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
4943 case ALTIVEC_BUILTIN_STVEWX:
4944 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
4945 case ALTIVEC_BUILTIN_STVXL:
4946 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
4947
4948 case ALTIVEC_BUILTIN_MFVSCR:
4949 icode = CODE_FOR_altivec_mfvscr;
4950 tmode = insn_data[icode].operand[0].mode;
4951
4952 if (target == 0
4953 || GET_MODE (target) != tmode
4954 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4955 target = gen_reg_rtx (tmode);
4956
4957 pat = GEN_FCN (icode) (target);
4958 if (! pat)
4959 return 0;
4960 emit_insn (pat);
4961 return target;
4962
4963 case ALTIVEC_BUILTIN_MTVSCR:
4964 icode = CODE_FOR_altivec_mtvscr;
4965 arg0 = TREE_VALUE (arglist);
4966 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4967 mode0 = insn_data[icode].operand[0].mode;
4968
4969 /* If we got invalid arguments bail out before generating bad rtl. */
4970 if (arg0 == error_mark_node)
4971 return const0_rtx;
4972
4973 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4974 op0 = copy_to_mode_reg (mode0, op0);
4975
4976 pat = GEN_FCN (icode) (op0);
4977 if (pat)
4978 emit_insn (pat);
4979 return NULL_RTX;
4980
4981 case ALTIVEC_BUILTIN_DSSALL:
4982 emit_insn (gen_altivec_dssall ());
4983 return NULL_RTX;
4984
4985 case ALTIVEC_BUILTIN_DSS:
4986 icode = CODE_FOR_altivec_dss;
4987 arg0 = TREE_VALUE (arglist);
4988 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4989 mode0 = insn_data[icode].operand[0].mode;
4990
4991 /* If we got invalid arguments bail out before generating bad rtl. */
4992 if (arg0 == error_mark_node)
4993 return const0_rtx;
4994
4995 if (TREE_CODE (arg0) != INTEGER_CST
4996 || TREE_INT_CST_LOW (arg0) & ~0x3)
4997 {
4998 error ("argument to dss must be a 2-bit unsigned literal");
4999 return const0_rtx;
5000 }
5001
5002 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5003 op0 = copy_to_mode_reg (mode0, op0);
5004
5005 emit_insn (gen_altivec_dss (op0));
5006 return NULL_RTX;
5007 }
5008
5009 /* Expand abs* operations. */
5010 d = (struct builtin_description *) bdesc_abs;
5011 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5012 if (d->code == fcode)
5013 return altivec_expand_abs_builtin (d->icode, arglist, target);
5014
5015 /* Expand the AltiVec predicates. */
5016 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5017 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5018 if (dp->code == fcode)
5019 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
5020
5021 /* LV* are funky. We initialized them differently. */
5022 switch (fcode)
5023 {
5024 case ALTIVEC_BUILTIN_LVSL:
5025 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
5026 arglist, target);
5027 case ALTIVEC_BUILTIN_LVSR:
5028 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
5029 arglist, target);
5030 case ALTIVEC_BUILTIN_LVEBX:
5031 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
5032 arglist, target);
5033 case ALTIVEC_BUILTIN_LVEHX:
5034 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
5035 arglist, target);
5036 case ALTIVEC_BUILTIN_LVEWX:
5037 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
5038 arglist, target);
5039 case ALTIVEC_BUILTIN_LVXL:
5040 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
5041 arglist, target);
5042 case ALTIVEC_BUILTIN_LVX:
5043 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
5044 arglist, target);
5045 default:
5046 break;
5047 /* Fall through. */
5048 }
5049
5050 *expandedp = false;
5051 return NULL_RTX;
5052 }
5053
5054 /* Binops that need to be initialized manually, but can be expanded
5055 automagically by rs6000_expand_binop_builtin. */
/* NOTE(review): the first field is zero in every entry here -- it appears
   to be a target-flags mask that is unused for SPE, since selection is
   gated by TARGET_SPE in rs6000_expand_builtin instead; confirm against
   the struct builtin_description definition.  */
5056 static struct builtin_description bdesc_2arg_spe[] =
5057 {
5058 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
5059 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
5060 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
5061 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
5062 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
5063 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
5064 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
5065 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
5066 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
5067 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
5068 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
5069 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
5070 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
5071 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
5072 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
5073 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
5074 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
5075 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
5076 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
5077 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
5078 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
5079 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
5080 };
5081
5082 /* Expand the builtin in EXP and store the result in TARGET. Store
5083 true in *EXPANDEDP if we found a builtin to expand.
5084
5085 This expands the SPE builtins that are not simple unary and binary
5086 operations. */
5087 static rtx
5088 spe_expand_builtin (exp, target, expandedp)
5089 tree exp;
5090 rtx target;
5091 bool *expandedp;
5092 {
5093 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5094 tree arglist = TREE_OPERAND (exp, 1);
5095 tree arg1, arg0;
5096 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5097 enum insn_code icode;
5098 enum machine_mode tmode, mode0;
5099 rtx pat, op0;
5100 struct builtin_description *d;
5101 size_t i;
5102
5103 *expandedp = true;
5104
5105 /* Syntax check for a 5-bit unsigned immediate. */
5106 switch (fcode)
5107 {
5108 case SPE_BUILTIN_EVSTDD:
5109 case SPE_BUILTIN_EVSTDH:
5110 case SPE_BUILTIN_EVSTDW:
5111 case SPE_BUILTIN_EVSTWHE:
5112 case SPE_BUILTIN_EVSTWHO:
5113 case SPE_BUILTIN_EVSTWWE:
5114 case SPE_BUILTIN_EVSTWWO:
5115 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5116 if (TREE_CODE (arg1) != INTEGER_CST
5117 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5118 {
5119 error ("argument 2 must be a 5-bit unsigned literal");
5120 return const0_rtx;
5121 }
5122 break;
5123 default:
5124 break;
5125 }
5126
5127 d = (struct builtin_description *) bdesc_2arg_spe;
5128 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
5129 if (d->code == fcode)
5130 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5131
5132 d = (struct builtin_description *) bdesc_spe_predicates;
5133 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
5134 if (d->code == fcode)
5135 return spe_expand_predicate_builtin (d->icode, arglist, target);
5136
5137 d = (struct builtin_description *) bdesc_spe_evsel;
5138 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
5139 if (d->code == fcode)
5140 return spe_expand_evsel_builtin (d->icode, arglist, target);
5141
5142 switch (fcode)
5143 {
5144 case SPE_BUILTIN_EVSTDDX:
5145 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
5146 case SPE_BUILTIN_EVSTDHX:
5147 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
5148 case SPE_BUILTIN_EVSTDWX:
5149 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
5150 case SPE_BUILTIN_EVSTWHEX:
5151 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
5152 case SPE_BUILTIN_EVSTWHOX:
5153 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
5154 case SPE_BUILTIN_EVSTWWEX:
5155 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
5156 case SPE_BUILTIN_EVSTWWOX:
5157 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
5158 case SPE_BUILTIN_EVSTDD:
5159 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
5160 case SPE_BUILTIN_EVSTDH:
5161 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
5162 case SPE_BUILTIN_EVSTDW:
5163 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
5164 case SPE_BUILTIN_EVSTWHE:
5165 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
5166 case SPE_BUILTIN_EVSTWHO:
5167 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
5168 case SPE_BUILTIN_EVSTWWE:
5169 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
5170 case SPE_BUILTIN_EVSTWWO:
5171 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
5172 case SPE_BUILTIN_MFSPEFSCR:
5173 icode = CODE_FOR_spe_mfspefscr;
5174 tmode = insn_data[icode].operand[0].mode;
5175
5176 if (target == 0
5177 || GET_MODE (target) != tmode
5178 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5179 target = gen_reg_rtx (tmode);
5180
5181 pat = GEN_FCN (icode) (target);
5182 if (! pat)
5183 return 0;
5184 emit_insn (pat);
5185 return target;
5186 case SPE_BUILTIN_MTSPEFSCR:
5187 icode = CODE_FOR_spe_mtspefscr;
5188 arg0 = TREE_VALUE (arglist);
5189 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5190 mode0 = insn_data[icode].operand[0].mode;
5191
5192 if (arg0 == error_mark_node)
5193 return const0_rtx;
5194
5195 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5196 op0 = copy_to_mode_reg (mode0, op0);
5197
5198 pat = GEN_FCN (icode) (op0);
5199 if (pat)
5200 emit_insn (pat);
5201 return NULL_RTX;
5202 default:
5203 break;
5204 }
5205
5206 *expandedp = false;
5207 return NULL_RTX;
5208 }
5209
5210 static rtx
5211 spe_expand_predicate_builtin (icode, arglist, target)
5212 enum insn_code icode;
5213 tree arglist;
5214 rtx target;
5215 {
5216 rtx pat, scratch, tmp;
5217 tree form = TREE_VALUE (arglist);
5218 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5219 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5220 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5221 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5222 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5223 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5224 int form_int;
5225 enum rtx_code code;
5226
5227 if (TREE_CODE (form) != INTEGER_CST)
5228 {
5229 error ("argument 1 of __builtin_spe_predicate must be a constant");
5230 return const0_rtx;
5231 }
5232 else
5233 form_int = TREE_INT_CST_LOW (form);
5234
5235 if (mode0 != mode1)
5236 abort ();
5237
5238 if (arg0 == error_mark_node || arg1 == error_mark_node)
5239 return const0_rtx;
5240
5241 if (target == 0
5242 || GET_MODE (target) != SImode
5243 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
5244 target = gen_reg_rtx (SImode);
5245
5246 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5247 op0 = copy_to_mode_reg (mode0, op0);
5248 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5249 op1 = copy_to_mode_reg (mode1, op1);
5250
5251 scratch = gen_reg_rtx (CCmode);
5252
5253 pat = GEN_FCN (icode) (scratch, op0, op1);
5254 if (! pat)
5255 return const0_rtx;
5256 emit_insn (pat);
5257
5258 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5259 _lower_. We use one compare, but look in different bits of the
5260 CR for each variant.
5261
5262 There are 2 elements in each SPE simd type (upper/lower). The CR
5263 bits are set as follows:
5264
5265 BIT0 | BIT 1 | BIT 2 | BIT 3
5266 U | L | (U | L) | (U & L)
5267
5268 So, for an "all" relationship, BIT 3 would be set.
5269 For an "any" relationship, BIT 2 would be set. Etc.
5270
5271 Following traditional nomenclature, these bits map to:
5272
5273 BIT0 | BIT 1 | BIT 2 | BIT 3
5274 LT | GT | EQ | OV
5275
5276 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5277 */
5278
5279 switch (form_int)
5280 {
5281 /* All variant. OV bit. */
5282 case 0:
5283 /* We need to get to the OV bit, which is the ORDERED bit. We
5284 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5285 that's ugly and will trigger a validate_condition_mode abort.
5286 So let's just use another pattern. */
5287 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
5288 return target;
5289 /* Any variant. EQ bit. */
5290 case 1:
5291 code = EQ;
5292 break;
5293 /* Upper variant. LT bit. */
5294 case 2:
5295 code = LT;
5296 break;
5297 /* Lower variant. GT bit. */
5298 case 3:
5299 code = GT;
5300 break;
5301 default:
5302 error ("argument 1 of __builtin_spe_predicate is out of range");
5303 return const0_rtx;
5304 }
5305
5306 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
5307 emit_move_insn (target, tmp);
5308
5309 return target;
5310 }
5311
5312 /* The evsel builtins look like this:
5313
5314 e = __builtin_spe_evsel_OP (a, b, c, d);
5315
5316 and work like this:
5317
5318 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5319 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5320 */
5321
5322 static rtx
5323 spe_expand_evsel_builtin (icode, arglist, target)
5324 enum insn_code icode;
5325 tree arglist;
5326 rtx target;
5327 {
5328 rtx pat, scratch;
5329 tree arg0 = TREE_VALUE (arglist);
5330 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5331 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5332 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5333 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5334 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5335 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5336 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5337 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5338 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5339
5340 if (mode0 != mode1)
5341 abort ();
5342
5343 if (arg0 == error_mark_node || arg1 == error_mark_node
5344 || arg2 == error_mark_node || arg3 == error_mark_node)
5345 return const0_rtx;
5346
5347 if (target == 0
5348 || GET_MODE (target) != mode0
5349 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5350 target = gen_reg_rtx (mode0);
5351
5352 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5353 op0 = copy_to_mode_reg (mode0, op0);
5354 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5355 op1 = copy_to_mode_reg (mode0, op1);
5356 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5357 op2 = copy_to_mode_reg (mode0, op2);
5358 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5359 op3 = copy_to_mode_reg (mode0, op3);
5360
5361 /* Generate the compare. */
5362 scratch = gen_reg_rtx (CCmode);
5363 pat = GEN_FCN (icode) (scratch, op0, op1);
5364 if (! pat)
5365 return const0_rtx;
5366 emit_insn (pat);
5367
5368 if (mode0 == V2SImode)
5369 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
5370 else
5371 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
5372
5373 return target;
5374 }
5375
5376 /* Expand an expression EXP that calls a built-in function,
5377 with result going to TARGET if that's convenient
5378 (and in mode MODE if that's convenient).
5379 SUBTARGET may be used as the target for computing one of EXP's operands.
5380 IGNORE is nonzero if the value is to be ignored. */
5381
5382 static rtx
5383 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5384 tree exp;
5385 rtx target;
5386 rtx subtarget ATTRIBUTE_UNUSED;
5387 enum machine_mode mode ATTRIBUTE_UNUSED;
5388 int ignore ATTRIBUTE_UNUSED;
5389 {
5390 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5391 tree arglist = TREE_OPERAND (exp, 1);
5392 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5393 struct builtin_description *d;
5394 size_t i;
5395 rtx ret;
5396 bool success;
5397
5398 if (TARGET_ALTIVEC)
5399 {
5400 ret = altivec_expand_builtin (exp, target, &success);
5401
5402 if (success)
5403 return ret;
5404 }
5405 if (TARGET_SPE)
5406 {
5407 ret = spe_expand_builtin (exp, target, &success);
5408
5409 if (success)
5410 return ret;
5411 }
5412
5413 if (TARGET_ALTIVEC || TARGET_SPE)
5414 {
5415 /* Handle simple unary operations. */
5416 d = (struct builtin_description *) bdesc_1arg;
5417 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5418 if (d->code == fcode)
5419 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5420
5421 /* Handle simple binary operations. */
5422 d = (struct builtin_description *) bdesc_2arg;
5423 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5424 if (d->code == fcode)
5425 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5426
5427 /* Handle simple ternary operations. */
5428 d = (struct builtin_description *) bdesc_3arg;
5429 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5430 if (d->code == fcode)
5431 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5432 }
5433
5434 abort ();
5435 return NULL_RTX;
5436 }
5437
5438 static void
5439 rs6000_init_builtins ()
5440 {
5441 if (TARGET_SPE)
5442 spe_init_builtins ();
5443 if (TARGET_ALTIVEC)
5444 altivec_init_builtins ();
5445 if (TARGET_ALTIVEC || TARGET_SPE)
5446 rs6000_common_init_builtins ();
5447 }
5448
5449 /* Search through a set of builtins and enable the mask bits.
5450 DESC is an array of builtins.
5451 SIZE is the total number of builtins.
5452 START is the builtin enum at which to start.
5453 END is the builtin enum at which to end. */
5454 static void
5455 enable_mask_for_builtins (desc, size, start, end)
5456 struct builtin_description *desc;
5457 int size;
5458 enum rs6000_builtins start, end;
5459 {
5460 int i;
5461
5462 for (i = 0; i < size; ++i)
5463 if (desc[i].code == start)
5464 break;
5465
5466 if (i == size)
5467 return;
5468
5469 for (; i < size; ++i)
5470 {
5471 /* Flip all the bits on. */
5472 desc[i].mask = target_flags;
5473 if (desc[i].code == end)
5474 break;
5475 }
5476 }
5477
/* Register the SPE builtin functions: build the function types, enable
   the mask bits for the table-driven SPE builtins, and declare the
   irregular (load/store/SPEFSCR), predicate and evsel builtins.  */

static void
spe_init_builtins ()
{
  tree endlink = void_list_node;
  tree puint_type_node = build_pointer_type (unsigned_type_node);
  tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
  tree pv2si_type_node = build_pointer_type (V2SI_type_node);
  struct builtin_description *d;
  size_t i;

  /* v2si f (v2si, v2si, v2si, v2si) -- used by the evsel builtins.  */
  tree v2si_ftype_4_v2si
    = build_function_type
    (V2SI_type_node,
     tree_cons (NULL_TREE, V2SI_type_node,
		tree_cons (NULL_TREE, V2SI_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, V2SI_type_node,
						 endlink)))));

  /* v2sf f (v2sf, v2sf, v2sf, v2sf) -- floating-point evsel variants.  */
  tree v2sf_ftype_4_v2sf
    = build_function_type
    (V2SF_type_node,
     tree_cons (NULL_TREE, V2SF_type_node,
		tree_cons (NULL_TREE, V2SF_type_node,
			   tree_cons (NULL_TREE, V2SF_type_node,
				      tree_cons (NULL_TREE, V2SF_type_node,
						 endlink)))));

  /* int f (int, v2si, v2si) -- integer predicate builtins.  */
  tree int_ftype_int_v2si_v2si
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, V2SI_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      endlink))));

  /* int f (int, v2sf, v2sf) -- floating-point predicate builtins.  */
  tree int_ftype_int_v2sf_v2sf
    = build_function_type
    (integer_type_node,
     tree_cons (NULL_TREE, integer_type_node,
		tree_cons (NULL_TREE, V2SF_type_node,
			   tree_cons (NULL_TREE, V2SF_type_node,
				      endlink))));

  tree void_ftype_v2si_puint_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));

  tree void_ftype_v2si_puint_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, puint_type_node,
						 tree_cons (NULL_TREE,
							    char_type_node,
							    endlink))));

  tree void_ftype_v2si_pv2si_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, pv2si_type_node,
						 tree_cons (NULL_TREE,
							    integer_type_node,
							    endlink))));

  tree void_ftype_v2si_pv2si_char
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, V2SI_type_node,
				      tree_cons (NULL_TREE, pv2si_type_node,
						 tree_cons (NULL_TREE,
							    char_type_node,
							    endlink))));

  tree void_ftype_int
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, integer_type_node, endlink));

  tree int_ftype_void
    = build_function_type (integer_type_node,
			   tree_cons (NULL_TREE, void_type_node, endlink));

  tree v2si_ftype_pv2si_int
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, pv2si_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_puint_int
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, puint_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  tree v2si_ftype_pushort_int
    = build_function_type (V2SI_type_node,
			   tree_cons (NULL_TREE, pushort_type_node,
				      tree_cons (NULL_TREE, integer_type_node,
						 endlink)));

  /* The initialization of the simple binary and unary builtins is
     done in rs6000_common_init_builtins, but we have to enable the
     mask bits here manually because we have run out of `target_flags'
     bits.  We really need to redesign this mask business.  */

  enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
			    ARRAY_SIZE (bdesc_2arg),
			    SPE_BUILTIN_EVADDW,
			    SPE_BUILTIN_EVXOR);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
			    ARRAY_SIZE (bdesc_1arg),
			    SPE_BUILTIN_EVABS,
			    SPE_BUILTIN_EVSUBFUSIAAW);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
			    ARRAY_SIZE (bdesc_spe_predicates),
			    SPE_BUILTIN_EVCMPEQ,
			    SPE_BUILTIN_EVFSTSTLT);
  enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
			    ARRAY_SIZE (bdesc_spe_evsel),
			    SPE_BUILTIN_EVSEL_CMPGTS,
			    SPE_BUILTIN_EVSEL_FSTSTEQ);

  /* Initialize irregular SPE builtins.  */

  /* SPEFSCR access and stores.  */
  def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
  def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
  def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
  def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
  def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
  def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
  def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
  def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
  def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
  def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
  def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
  def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
  def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
  def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
  def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);

  /* Loads.  */
  def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
  def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
  def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
  def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
  def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
  def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
  def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
  def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
  def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
  def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
  def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
  def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
  def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
  def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
  def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
  def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);

  /* Predicates.  The function type is chosen from the mode of the
     insn's first input operand.  */
  d = (struct builtin_description *) bdesc_spe_predicates;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
	{
	case V2SImode:
	  type = int_ftype_int_v2si_v2si;
	  break;
	case V2SFmode:
	  type = int_ftype_int_v2sf_v2sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }

  /* Evsel predicates.  Same mode-driven type selection as above.  */
  d = (struct builtin_description *) bdesc_spe_evsel;
  for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
    {
      tree type;

      switch (insn_data[d->icode].operand[1].mode)
	{
	case V2SImode:
	  type = v2si_ftype_4_v2si;
	  break;
	case V2SFmode:
	  type = v2sf_ftype_4_v2sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }
}
5687
/* Register the AltiVec builtin functions: internal load/store helpers,
   the VSCR/data-stream builtins, the lvx/stvx family, and the
   table-driven DST, predicate and abs builtins.  */

static void
altivec_init_builtins ()
{
  struct builtin_description *d;
  struct builtin_description_predicates *dp;
  size_t i;
  tree pfloat_type_node = build_pointer_type (float_type_node);
  tree pint_type_node = build_pointer_type (integer_type_node);
  tree pshort_type_node = build_pointer_type (short_integer_type_node);
  tree pchar_type_node = build_pointer_type (char_type_node);

  tree pvoid_type_node = build_pointer_type (void_type_node);

  /* Const-qualified pointer types for the load builtins.  */
  tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
  tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
  tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
  tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));

  tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));

  tree int_ftype_int_v4si_v4si
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SI_type_node,
				V4SI_type_node, NULL_TREE);
  tree v4sf_ftype_pcfloat
    = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
  tree void_ftype_pfloat_v4sf
    = build_function_type_list (void_type_node,
				pfloat_type_node, V4SF_type_node, NULL_TREE);
  tree v4si_ftype_pcint
    = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
  tree void_ftype_pint_v4si
    = build_function_type_list (void_type_node,
				pint_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_pcshort
    = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
  tree void_ftype_pshort_v8hi
    = build_function_type_list (void_type_node,
				pshort_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_pcchar
    = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
  tree void_ftype_pchar_v16qi
    = build_function_type_list (void_type_node,
				pchar_type_node, V16QI_type_node, NULL_TREE);
  tree void_ftype_v4si
    = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_void
    = build_function_type (V8HI_type_node, void_list_node);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree void_ftype_qi
    = build_function_type_list (void_type_node, char_type_node, NULL_TREE);

  tree v16qi_ftype_int_pcvoid
    = build_function_type_list (V16QI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v8hi_ftype_int_pcvoid
    = build_function_type_list (V8HI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);
  tree v4si_ftype_int_pcvoid
    = build_function_type_list (V4SI_type_node,
				integer_type_node, pcvoid_type_node, NULL_TREE);

  tree void_ftype_v4si_int_pvoid
    = build_function_type_list (void_type_node,
				V4SI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v16qi_int_pvoid
    = build_function_type_list (void_type_node,
				V16QI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree void_ftype_v8hi_int_pvoid
    = build_function_type_list (void_type_node,
				V8HI_type_node, integer_type_node,
				pvoid_type_node, NULL_TREE);
  tree int_ftype_int_v8hi_v8hi
    = build_function_type_list (integer_type_node,
				integer_type_node, V8HI_type_node,
				V8HI_type_node, NULL_TREE);
  tree int_ftype_int_v16qi_v16qi
    = build_function_type_list (integer_type_node,
				integer_type_node, V16QI_type_node,
				V16QI_type_node, NULL_TREE);
  tree int_ftype_int_v4sf_v4sf
    = build_function_type_list (integer_type_node,
				integer_type_node, V4SF_type_node,
				V4SF_type_node, NULL_TREE);
  tree v4si_ftype_v4si
    = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
  tree v8hi_ftype_v8hi
    = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
  tree v16qi_ftype_v16qi
    = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
  tree v4sf_ftype_v4sf
    = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
  tree void_ftype_pcvoid_int_char
    = build_function_type_list (void_type_node,
				pcvoid_type_node, integer_type_node,
				char_type_node, NULL_TREE);

  /* Internal load/store helpers used by vec_ld/vec_st expansion.  */
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
	       ALTIVEC_BUILTIN_LD_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
	       ALTIVEC_BUILTIN_ST_INTERNAL_4si);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
	       ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
	       ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
	       ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
  def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);

  /* Add the DST variants.  All share the same function type.  */
  d = (struct builtin_description *) bdesc_dst;
  for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
    def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);

  /* Initialize the predicates.  The function type is chosen from the
     mode of the insn's first input operand.  */
  dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
  for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
    {
      enum machine_mode mode1;
      tree type;

      mode1 = insn_data[dp->icode].operand[1].mode;

      switch (mode1)
	{
	case V4SImode:
	  type = int_ftype_int_v4si_v4si;
	  break;
	case V8HImode:
	  type = int_ftype_int_v8hi_v8hi;
	  break;
	case V16QImode:
	  type = int_ftype_int_v16qi_v16qi;
	  break;
	case V4SFmode:
	  type = int_ftype_int_v4sf_v4sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (dp->mask, dp->name, type, dp->code);
    }

  /* Initialize the abs* operators.  Here the type is chosen from the
     mode of the insn's output operand.  */
  d = (struct builtin_description *) bdesc_abs;
  for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
    {
      enum machine_mode mode0;
      tree type;

      mode0 = insn_data[d->icode].operand[0].mode;

      switch (mode0)
	{
	case V4SImode:
	  type = v4si_ftype_v4si;
	  break;
	case V8HImode:
	  type = v8hi_ftype_v8hi;
	  break;
	case V16QImode:
	  type = v16qi_ftype_v16qi;
	  break;
	case V4SFmode:
	  type = v4sf_ftype_v4sf;
	  break;
	default:
	  abort ();
	}

      def_builtin (d->mask, d->name, type, d->code);
    }
}
5886
5887 static void
5888 rs6000_common_init_builtins ()
5889 {
5890 struct builtin_description *d;
5891 size_t i;
5892
5893 tree v4sf_ftype_v4sf_v4sf_v16qi
5894 = build_function_type_list (V4SF_type_node,
5895 V4SF_type_node, V4SF_type_node,
5896 V16QI_type_node, NULL_TREE);
5897 tree v4si_ftype_v4si_v4si_v16qi
5898 = build_function_type_list (V4SI_type_node,
5899 V4SI_type_node, V4SI_type_node,
5900 V16QI_type_node, NULL_TREE);
5901 tree v8hi_ftype_v8hi_v8hi_v16qi
5902 = build_function_type_list (V8HI_type_node,
5903 V8HI_type_node, V8HI_type_node,
5904 V16QI_type_node, NULL_TREE);
5905 tree v16qi_ftype_v16qi_v16qi_v16qi
5906 = build_function_type_list (V16QI_type_node,
5907 V16QI_type_node, V16QI_type_node,
5908 V16QI_type_node, NULL_TREE);
5909 tree v4si_ftype_char
5910 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
5911 tree v8hi_ftype_char
5912 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
5913 tree v16qi_ftype_char
5914 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
5915 tree v8hi_ftype_v16qi
5916 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
5917 tree v4sf_ftype_v4sf
5918 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
5919
5920 tree v2si_ftype_v2si_v2si
5921 = build_function_type_list (V2SI_type_node,
5922 V2SI_type_node, V2SI_type_node, NULL_TREE);
5923
5924 tree v2sf_ftype_v2sf_v2sf
5925 = build_function_type_list (V2SF_type_node,
5926 V2SF_type_node, V2SF_type_node, NULL_TREE);
5927
5928 tree v2si_ftype_int_int
5929 = build_function_type_list (V2SI_type_node,
5930 integer_type_node, integer_type_node,
5931 NULL_TREE);
5932
5933 tree v2si_ftype_v2si
5934 = build_function_type_list (V2SI_type_node, V2SI_type_node, NULL_TREE);
5935
5936 tree v2sf_ftype_v2sf
5937 = build_function_type_list (V2SF_type_node,
5938 V2SF_type_node, NULL_TREE);
5939
5940 tree v2sf_ftype_v2si
5941 = build_function_type_list (V2SF_type_node,
5942 V2SI_type_node, NULL_TREE);
5943
5944 tree v2si_ftype_v2sf
5945 = build_function_type_list (V2SI_type_node,
5946 V2SF_type_node, NULL_TREE);
5947
5948 tree v2si_ftype_v2si_char
5949 = build_function_type_list (V2SI_type_node,
5950 V2SI_type_node, char_type_node, NULL_TREE);
5951
5952 tree v2si_ftype_int_char
5953 = build_function_type_list (V2SI_type_node,
5954 integer_type_node, char_type_node, NULL_TREE);
5955
5956 tree v2si_ftype_char
5957 = build_function_type_list (V2SI_type_node, char_type_node, NULL_TREE);
5958
5959 tree int_ftype_int_int
5960 = build_function_type_list (integer_type_node,
5961 integer_type_node, integer_type_node,
5962 NULL_TREE);
5963
5964 tree v4si_ftype_v4si_v4si
5965 = build_function_type_list (V4SI_type_node,
5966 V4SI_type_node, V4SI_type_node, NULL_TREE);
5967 tree v4sf_ftype_v4si_char
5968 = build_function_type_list (V4SF_type_node,
5969 V4SI_type_node, char_type_node, NULL_TREE);
5970 tree v4si_ftype_v4sf_char
5971 = build_function_type_list (V4SI_type_node,
5972 V4SF_type_node, char_type_node, NULL_TREE);
5973 tree v4si_ftype_v4si_char
5974 = build_function_type_list (V4SI_type_node,
5975 V4SI_type_node, char_type_node, NULL_TREE);
5976 tree v8hi_ftype_v8hi_char
5977 = build_function_type_list (V8HI_type_node,
5978 V8HI_type_node, char_type_node, NULL_TREE);
5979 tree v16qi_ftype_v16qi_char
5980 = build_function_type_list (V16QI_type_node,
5981 V16QI_type_node, char_type_node, NULL_TREE);
5982 tree v16qi_ftype_v16qi_v16qi_char
5983 = build_function_type_list (V16QI_type_node,
5984 V16QI_type_node, V16QI_type_node,
5985 char_type_node, NULL_TREE);
5986 tree v8hi_ftype_v8hi_v8hi_char
5987 = build_function_type_list (V8HI_type_node,
5988 V8HI_type_node, V8HI_type_node,
5989 char_type_node, NULL_TREE);
5990 tree v4si_ftype_v4si_v4si_char
5991 = build_function_type_list (V4SI_type_node,
5992 V4SI_type_node, V4SI_type_node,
5993 char_type_node, NULL_TREE);
5994 tree v4sf_ftype_v4sf_v4sf_char
5995 = build_function_type_list (V4SF_type_node,
5996 V4SF_type_node, V4SF_type_node,
5997 char_type_node, NULL_TREE);
5998 tree v4sf_ftype_v4sf_v4sf
5999 = build_function_type_list (V4SF_type_node,
6000 V4SF_type_node, V4SF_type_node, NULL_TREE);
6001 tree v4sf_ftype_v4sf_v4sf_v4si
6002 = build_function_type_list (V4SF_type_node,
6003 V4SF_type_node, V4SF_type_node,
6004 V4SI_type_node, NULL_TREE);
6005 tree v4sf_ftype_v4sf_v4sf_v4sf
6006 = build_function_type_list (V4SF_type_node,
6007 V4SF_type_node, V4SF_type_node,
6008 V4SF_type_node, NULL_TREE);
6009 tree v4si_ftype_v4si_v4si_v4si
6010 = build_function_type_list (V4SI_type_node,
6011 V4SI_type_node, V4SI_type_node,
6012 V4SI_type_node, NULL_TREE);
6013 tree v8hi_ftype_v8hi_v8hi
6014 = build_function_type_list (V8HI_type_node,
6015 V8HI_type_node, V8HI_type_node, NULL_TREE);
6016 tree v8hi_ftype_v8hi_v8hi_v8hi
6017 = build_function_type_list (V8HI_type_node,
6018 V8HI_type_node, V8HI_type_node,
6019 V8HI_type_node, NULL_TREE);
6020 tree v4si_ftype_v8hi_v8hi_v4si
6021 = build_function_type_list (V4SI_type_node,
6022 V8HI_type_node, V8HI_type_node,
6023 V4SI_type_node, NULL_TREE);
6024 tree v4si_ftype_v16qi_v16qi_v4si
6025 = build_function_type_list (V4SI_type_node,
6026 V16QI_type_node, V16QI_type_node,
6027 V4SI_type_node, NULL_TREE);
6028 tree v16qi_ftype_v16qi_v16qi
6029 = build_function_type_list (V16QI_type_node,
6030 V16QI_type_node, V16QI_type_node, NULL_TREE);
6031 tree v4si_ftype_v4sf_v4sf
6032 = build_function_type_list (V4SI_type_node,
6033 V4SF_type_node, V4SF_type_node, NULL_TREE);
6034 tree v8hi_ftype_v16qi_v16qi
6035 = build_function_type_list (V8HI_type_node,
6036 V16QI_type_node, V16QI_type_node, NULL_TREE);
6037 tree v4si_ftype_v8hi_v8hi
6038 = build_function_type_list (V4SI_type_node,
6039 V8HI_type_node, V8HI_type_node, NULL_TREE);
6040 tree v8hi_ftype_v4si_v4si
6041 = build_function_type_list (V8HI_type_node,
6042 V4SI_type_node, V4SI_type_node, NULL_TREE);
6043 tree v16qi_ftype_v8hi_v8hi
6044 = build_function_type_list (V16QI_type_node,
6045 V8HI_type_node, V8HI_type_node, NULL_TREE);
6046 tree v4si_ftype_v16qi_v4si
6047 = build_function_type_list (V4SI_type_node,
6048 V16QI_type_node, V4SI_type_node, NULL_TREE);
6049 tree v4si_ftype_v16qi_v16qi
6050 = build_function_type_list (V4SI_type_node,
6051 V16QI_type_node, V16QI_type_node, NULL_TREE);
6052 tree v4si_ftype_v8hi_v4si
6053 = build_function_type_list (V4SI_type_node,
6054 V8HI_type_node, V4SI_type_node, NULL_TREE);
6055 tree v4si_ftype_v8hi
6056 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
6057 tree int_ftype_v4si_v4si
6058 = build_function_type_list (integer_type_node,
6059 V4SI_type_node, V4SI_type_node, NULL_TREE);
6060 tree int_ftype_v4sf_v4sf
6061 = build_function_type_list (integer_type_node,
6062 V4SF_type_node, V4SF_type_node, NULL_TREE);
6063 tree int_ftype_v16qi_v16qi
6064 = build_function_type_list (integer_type_node,
6065 V16QI_type_node, V16QI_type_node, NULL_TREE);
6066 tree int_ftype_v8hi_v8hi
6067 = build_function_type_list (integer_type_node,
6068 V8HI_type_node, V8HI_type_node, NULL_TREE);
6069
6070 /* Add the simple ternary operators. */
6071 d = (struct builtin_description *) bdesc_3arg;
6072 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6073 {
6074
6075 enum machine_mode mode0, mode1, mode2, mode3;
6076 tree type;
6077
6078 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6079 continue;
6080
6081 mode0 = insn_data[d->icode].operand[0].mode;
6082 mode1 = insn_data[d->icode].operand[1].mode;
6083 mode2 = insn_data[d->icode].operand[2].mode;
6084 mode3 = insn_data[d->icode].operand[3].mode;
6085
6086 /* When all four are of the same mode. */
6087 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
6088 {
6089 switch (mode0)
6090 {
6091 case V4SImode:
6092 type = v4si_ftype_v4si_v4si_v4si;
6093 break;
6094 case V4SFmode:
6095 type = v4sf_ftype_v4sf_v4sf_v4sf;
6096 break;
6097 case V8HImode:
6098 type = v8hi_ftype_v8hi_v8hi_v8hi;
6099 break;
6100 case V16QImode:
6101 type = v16qi_ftype_v16qi_v16qi_v16qi;
6102 break;
6103 default:
6104 abort();
6105 }
6106 }
6107 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
6108 {
6109 switch (mode0)
6110 {
6111 case V4SImode:
6112 type = v4si_ftype_v4si_v4si_v16qi;
6113 break;
6114 case V4SFmode:
6115 type = v4sf_ftype_v4sf_v4sf_v16qi;
6116 break;
6117 case V8HImode:
6118 type = v8hi_ftype_v8hi_v8hi_v16qi;
6119 break;
6120 case V16QImode:
6121 type = v16qi_ftype_v16qi_v16qi_v16qi;
6122 break;
6123 default:
6124 abort();
6125 }
6126 }
6127 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
6128 && mode3 == V4SImode)
6129 type = v4si_ftype_v16qi_v16qi_v4si;
6130 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
6131 && mode3 == V4SImode)
6132 type = v4si_ftype_v8hi_v8hi_v4si;
6133 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
6134 && mode3 == V4SImode)
6135 type = v4sf_ftype_v4sf_v4sf_v4si;
6136
6137 /* vchar, vchar, vchar, 4 bit literal. */
6138 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
6139 && mode3 == QImode)
6140 type = v16qi_ftype_v16qi_v16qi_char;
6141
6142 /* vshort, vshort, vshort, 4 bit literal. */
6143 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
6144 && mode3 == QImode)
6145 type = v8hi_ftype_v8hi_v8hi_char;
6146
6147 /* vint, vint, vint, 4 bit literal. */
6148 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
6149 && mode3 == QImode)
6150 type = v4si_ftype_v4si_v4si_char;
6151
6152 /* vfloat, vfloat, vfloat, 4 bit literal. */
6153 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
6154 && mode3 == QImode)
6155 type = v4sf_ftype_v4sf_v4sf_char;
6156
6157 else
6158 abort ();
6159
6160 def_builtin (d->mask, d->name, type, d->code);
6161 }
6162
6163 /* Add the simple binary operators. */
6164 d = (struct builtin_description *) bdesc_2arg;
6165 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6166 {
6167 enum machine_mode mode0, mode1, mode2;
6168 tree type;
6169
6170 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6171 continue;
6172
6173 mode0 = insn_data[d->icode].operand[0].mode;
6174 mode1 = insn_data[d->icode].operand[1].mode;
6175 mode2 = insn_data[d->icode].operand[2].mode;
6176
6177 /* When all three operands are of the same mode. */
6178 if (mode0 == mode1 && mode1 == mode2)
6179 {
6180 switch (mode0)
6181 {
6182 case V4SFmode:
6183 type = v4sf_ftype_v4sf_v4sf;
6184 break;
6185 case V4SImode:
6186 type = v4si_ftype_v4si_v4si;
6187 break;
6188 case V16QImode:
6189 type = v16qi_ftype_v16qi_v16qi;
6190 break;
6191 case V8HImode:
6192 type = v8hi_ftype_v8hi_v8hi;
6193 break;
6194 case V2SImode:
6195 type = v2si_ftype_v2si_v2si;
6196 break;
6197 case V2SFmode:
6198 type = v2sf_ftype_v2sf_v2sf;
6199 break;
6200 case SImode:
6201 type = int_ftype_int_int;
6202 break;
6203 default:
6204 abort ();
6205 }
6206 }
6207
6208 /* A few other combos we really don't want to do manually. */
6209
6210 /* vint, vfloat, vfloat. */
6211 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
6212 type = v4si_ftype_v4sf_v4sf;
6213
6214 /* vshort, vchar, vchar. */
6215 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
6216 type = v8hi_ftype_v16qi_v16qi;
6217
6218 /* vint, vshort, vshort. */
6219 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
6220 type = v4si_ftype_v8hi_v8hi;
6221
6222 /* vshort, vint, vint. */
6223 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
6224 type = v8hi_ftype_v4si_v4si;
6225
6226 /* vchar, vshort, vshort. */
6227 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
6228 type = v16qi_ftype_v8hi_v8hi;
6229
6230 /* vint, vchar, vint. */
6231 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
6232 type = v4si_ftype_v16qi_v4si;
6233
6234 /* vint, vchar, vchar. */
6235 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
6236 type = v4si_ftype_v16qi_v16qi;
6237
6238 /* vint, vshort, vint. */
6239 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
6240 type = v4si_ftype_v8hi_v4si;
6241
6242 /* vint, vint, 5 bit literal. */
6243 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
6244 type = v4si_ftype_v4si_char;
6245
6246 /* vshort, vshort, 5 bit literal. */
6247 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
6248 type = v8hi_ftype_v8hi_char;
6249
6250 /* vchar, vchar, 5 bit literal. */
6251 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
6252 type = v16qi_ftype_v16qi_char;
6253
6254 /* vfloat, vint, 5 bit literal. */
6255 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
6256 type = v4sf_ftype_v4si_char;
6257
6258 /* vint, vfloat, 5 bit literal. */
6259 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
6260 type = v4si_ftype_v4sf_char;
6261
6262 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
6263 type = v2si_ftype_int_int;
6264
6265 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
6266 type = v2si_ftype_v2si_char;
6267
6268 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
6269 type = v2si_ftype_int_char;
6270
6271 /* int, x, x. */
6272 else if (mode0 == SImode)
6273 {
6274 switch (mode1)
6275 {
6276 case V4SImode:
6277 type = int_ftype_v4si_v4si;
6278 break;
6279 case V4SFmode:
6280 type = int_ftype_v4sf_v4sf;
6281 break;
6282 case V16QImode:
6283 type = int_ftype_v16qi_v16qi;
6284 break;
6285 case V8HImode:
6286 type = int_ftype_v8hi_v8hi;
6287 break;
6288 default:
6289 abort ();
6290 }
6291 }
6292
6293 else
6294 abort ();
6295
6296 def_builtin (d->mask, d->name, type, d->code);
6297 }
6298
6299 /* Add the simple unary operators. */
6300 d = (struct builtin_description *) bdesc_1arg;
6301 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6302 {
6303 enum machine_mode mode0, mode1;
6304 tree type;
6305
6306 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6307 continue;
6308
6309 mode0 = insn_data[d->icode].operand[0].mode;
6310 mode1 = insn_data[d->icode].operand[1].mode;
6311
6312 if (mode0 == V4SImode && mode1 == QImode)
6313 type = v4si_ftype_char;
6314 else if (mode0 == V8HImode && mode1 == QImode)
6315 type = v8hi_ftype_char;
6316 else if (mode0 == V16QImode && mode1 == QImode)
6317 type = v16qi_ftype_char;
6318 else if (mode0 == V4SFmode && mode1 == V4SFmode)
6319 type = v4sf_ftype_v4sf;
6320 else if (mode0 == V8HImode && mode1 == V16QImode)
6321 type = v8hi_ftype_v16qi;
6322 else if (mode0 == V4SImode && mode1 == V8HImode)
6323 type = v4si_ftype_v8hi;
6324 else if (mode0 == V2SImode && mode1 == V2SImode)
6325 type = v2si_ftype_v2si;
6326 else if (mode0 == V2SFmode && mode1 == V2SFmode)
6327 type = v2sf_ftype_v2sf;
6328 else if (mode0 == V2SFmode && mode1 == V2SImode)
6329 type = v2sf_ftype_v2si;
6330 else if (mode0 == V2SImode && mode1 == V2SFmode)
6331 type = v2si_ftype_v2sf;
6332 else if (mode0 == V2SImode && mode1 == QImode)
6333 type = v2si_ftype_char;
6334 else
6335 abort ();
6336
6337 def_builtin (d->mask, d->name, type, d->code);
6338 }
6339 }
6340
6341 \f
/* Expand a block move operation, and return 1 if successful.  Return 0
   if we should let the compiler generate normal code.

   operands[0] is the destination
   operands[1] is the source
   operands[2] is the length
   operands[3] is the alignment */

/* Number of pending stores batched up when string instructions are not
   available; the loads are emitted first and the stores flushed in
   groups of this size.  */
#define MAX_MOVE_REG 4

int
expand_block_move (operands)
     rtx operands[];
{
  rtx orig_dest = operands[0];
  rtx orig_src	= operands[1];
  rtx bytes_rtx	= operands[2];
  rtx align_rtx = operands[3];
  int constp	= (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;
  int offset;
  int move_bytes;

  /* If this is not a fixed size move, just call memcpy */
  if (! constp)
    return 0;

  /* If this is not a fixed size alignment, abort */
  if (GET_CODE (align_rtx) != CONST_INT)
    abort ();
  align = INTVAL (align_rtx);

  /* Anything to move? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  if (TARGET_STRING)	/* string instructions are available */
    {
      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
	{
	  /* Each iteration emits either one multi-register string move
	     (gen_func.movstrsi, mode stays BLKmode) or one single-mode
	     load/store pair (gen_func.mov, mode set to DI/SI/HI/QI).  */
	  union {
	    rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
	    rtx (*mov) PARAMS ((rtx, rtx));
	  } gen_func;
	  enum machine_mode mode = BLKmode;
	  rtx src, dest;

	  /* The multi-register movstrsi patterns clobber hard regs
	     r5..r12, so they can only be used when those registers are
	     not fixed.  */
	  if (bytes > 24		/* move up to 32 bytes at a time */
	      && ! fixed_regs[5]
	      && ! fixed_regs[6]
	      && ! fixed_regs[7]
	      && ! fixed_regs[8]
	      && ! fixed_regs[9]
	      && ! fixed_regs[10]
	      && ! fixed_regs[11]
	      && ! fixed_regs[12])
	    {
	      move_bytes = (bytes > 32) ? 32 : bytes;
	      gen_func.movstrsi = gen_movstrsi_8reg;
	    }
	  else if (bytes > 16	/* move up to 24 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8]
		   && ! fixed_regs[9]
		   && ! fixed_regs[10])
	    {
	      move_bytes = (bytes > 24) ? 24 : bytes;
	      gen_func.movstrsi = gen_movstrsi_6reg;
	    }
	  else if (bytes > 8	/* move up to 16 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8])
	    {
	      move_bytes = (bytes > 16) ? 16 : bytes;
	      gen_func.movstrsi = gen_movstrsi_4reg;
	    }
	  else if (bytes >= 8 && TARGET_POWERPC64
		   /* 64-bit loads and stores require word-aligned
		      displacements.  */
		   && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      mode = DImode;
	      gen_func.mov = gen_movdi;
	    }
	  else if (bytes > 4 && !TARGET_POWERPC64)
	    {			/* move up to 8 bytes at a time */
	      move_bytes = (bytes > 8) ? 8 : bytes;
	      gen_func.movstrsi = gen_movstrsi_2reg;
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {			/* move 4 bytes */
	      move_bytes = 4;
	      mode = SImode;
	      gen_func.mov = gen_movsi;
	    }
	  else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {			/* move 2 bytes */
	      move_bytes = 2;
	      mode = HImode;
	      gen_func.mov = gen_movhi;
	    }
	  else if (bytes == 1)	/* move 1 byte */
	    {
	      move_bytes = 1;
	      mode = QImode;
	      gen_func.mov = gen_movqi;
	    }
	  else
	    {			/* move up to 4 bytes at a time */
	      move_bytes = (bytes > 4) ? 4 : bytes;
	      gen_func.movstrsi = gen_movstrsi_1reg;
	    }

	  src = adjust_address (orig_src, mode, offset);
	  dest = adjust_address (orig_dest, mode, offset);

	  if (mode == BLKmode)
	    {
	      /* Move the address into scratch registers.  The movstrsi
		 patterns require zero offset.  */
	      if (!REG_P (XEXP (src, 0)))
		{
		  rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
		  src = replace_equiv_address (src, src_reg);
		}
	      set_mem_size (src, GEN_INT (move_bytes));

	      if (!REG_P (XEXP (dest, 0)))
		{
		  rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
		  dest = replace_equiv_address (dest, dest_reg);
		}
	      set_mem_size (dest, GEN_INT (move_bytes));

	      emit_insn ((*gen_func.movstrsi) (dest, src,
					       GEN_INT (move_bytes & 31),
					       align_rtx));
	    }
	  else
	    {
	      /* Single-mode copy goes through a fresh temporary.  */
	      rtx tmp_reg = gen_reg_rtx (mode);

	      emit_insn ((*gen_func.mov) (tmp_reg, src));
	      emit_insn ((*gen_func.mov) (dest, tmp_reg));
	    }
	}
    }

  else /* string instructions not available */
    {
      rtx stores[MAX_MOVE_REG];
      int num_reg = 0;
      int i;

      for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
	{
	  rtx (*gen_mov_func) PARAMS ((rtx, rtx));
	  enum machine_mode mode;
	  rtx src, dest, tmp_reg;

	  /* Generate the appropriate load and store, saving the stores
	     for later.  */
	  if (bytes >= 8 && TARGET_POWERPC64
	      /* 64-bit loads and stores require word-aligned
		 displacements.  */
	      && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      mode = DImode;
	      gen_mov_func = gen_movdi;
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 4;
	      mode = SImode;
	      gen_mov_func = gen_movsi;
	    }
	  else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 2;
	      mode = HImode;
	      gen_mov_func = gen_movhi;
	    }
	  else
	    {
	      move_bytes = 1;
	      mode = QImode;
	      gen_mov_func = gen_movqi;
	    }

	  src = adjust_address (orig_src, mode, offset);
	  dest = adjust_address (orig_dest, mode, offset);
	  tmp_reg = gen_reg_rtx (mode);

	  emit_insn ((*gen_mov_func) (tmp_reg, src));
	  stores[num_reg++] = (*gen_mov_func) (dest, tmp_reg);

	  /* Flush the batch of pending stores once it is full.  */
	  if (num_reg >= MAX_MOVE_REG)
	    {
	      for (i = 0; i < num_reg; i++)
		emit_insn (stores[i]);
	      num_reg = 0;
	    }
	}

      /* Flush any stores left over from the final partial batch.  */
      for (i = 0; i < num_reg; i++)
	emit_insn (stores[i]);
    }

  return 1;
}
6565
6566 \f
6567 /* Return 1 if OP is a load multiple operation. It is known to be a
6568 PARALLEL and the first section will be tested. */
6569
6570 int
6571 load_multiple_operation (op, mode)
6572 rtx op;
6573 enum machine_mode mode ATTRIBUTE_UNUSED;
6574 {
6575 int count = XVECLEN (op, 0);
6576 unsigned int dest_regno;
6577 rtx src_addr;
6578 int i;
6579
6580 /* Perform a quick check so we don't blow up below. */
6581 if (count <= 1
6582 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6583 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
6584 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
6585 return 0;
6586
6587 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
6588 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
6589
6590 for (i = 1; i < count; i++)
6591 {
6592 rtx elt = XVECEXP (op, 0, i);
6593
6594 if (GET_CODE (elt) != SET
6595 || GET_CODE (SET_DEST (elt)) != REG
6596 || GET_MODE (SET_DEST (elt)) != SImode
6597 || REGNO (SET_DEST (elt)) != dest_regno + i
6598 || GET_CODE (SET_SRC (elt)) != MEM
6599 || GET_MODE (SET_SRC (elt)) != SImode
6600 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
6601 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
6602 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
6603 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
6604 return 0;
6605 }
6606
6607 return 1;
6608 }
6609
6610 /* Similar, but tests for store multiple. Here, the second vector element
6611 is a CLOBBER. It will be tested later. */
6612
6613 int
6614 store_multiple_operation (op, mode)
6615 rtx op;
6616 enum machine_mode mode ATTRIBUTE_UNUSED;
6617 {
6618 int count = XVECLEN (op, 0) - 1;
6619 unsigned int src_regno;
6620 rtx dest_addr;
6621 int i;
6622
6623 /* Perform a quick check so we don't blow up below. */
6624 if (count <= 1
6625 || GET_CODE (XVECEXP (op, 0, 0)) != SET
6626 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
6627 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
6628 return 0;
6629
6630 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
6631 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
6632
6633 for (i = 1; i < count; i++)
6634 {
6635 rtx elt = XVECEXP (op, 0, i + 1);
6636
6637 if (GET_CODE (elt) != SET
6638 || GET_CODE (SET_SRC (elt)) != REG
6639 || GET_MODE (SET_SRC (elt)) != SImode
6640 || REGNO (SET_SRC (elt)) != src_regno + i
6641 || GET_CODE (SET_DEST (elt)) != MEM
6642 || GET_MODE (SET_DEST (elt)) != SImode
6643 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
6644 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
6645 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
6646 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
6647 return 0;
6648 }
6649
6650 return 1;
6651 }
6652
/* Return a string to perform a load_multiple operation.
   operands[0] is the vector.
   operands[1] is the source address.
   operands[2] is the first destination register.  */

const char *
rs6000_output_load_multiple (operands)
     rtx operands[3];
{
  /* We have to handle the case where the pseudo used to contain the address
     is assigned to one of the output registers.  */
  int i, j;
  int words = XVECLEN (operands[0], 0);
  rtx xop[10];

  /* A one-word "multiple" is just an ordinary load.  */
  if (XVECLEN (operands[0], 0) == 1)
    return "{l|lwz} %2,0(%1)";

  for (i = 0; i < words; i++)
    if (refers_to_regno_p (REGNO (operands[2]) + i,
			   REGNO (operands[2]) + i + 1, operands[1], 0))
      {
	if (i == words-1)
	  {
	    /* The address register is the last destination: lswi the
	       first WORDS-1 registers, then load the final word (which
	       clobbers the address) separately.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = operands[2];
	    output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
	else if (i == 0)
	  {
	    /* The address register is the first destination: advance
	       the address past word 0, lswi the remaining WORDS-1
	       registers from there, and load word 0 into the address
	       register last.  */
	    xop[0] = GEN_INT (4 * (words-1));
	    xop[1] = operands[1];
	    xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
	    output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
	    return "";
	  }
	else
	  {
	    /* The address register lands somewhere in the middle: fall
	       back to individual loads, saving the one that clobbers
	       the address register for last.  */
	    for (j = 0; j < words; j++)
	      if (j != i)
		{
		  xop[0] = GEN_INT (j * 4);
		  xop[1] = operands[1];
		  xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
		  output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
		}
	    xop[0] = GEN_INT (i * 4);
	    xop[1] = operands[1];
	    output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
	    return "";
	  }
      }

  /* The address register is not among the destinations, so a plain
     lswi covers the whole vector.  */
  return "{lsi|lswi} %2,%1,%N0";
}
6710
/* Return 1 for a parallel vrsave operation.  */

int
vrsave_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno, src_regno;
  int i;

  /* The first element must be a SET of a register from an
     UNSPEC_VOLATILE.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  /* NOTE(review): SET_SRC here is known to be an UNSPEC_VOLATILE, not
     a REG, so REGNO reads the unspec's first field rather than a real
     register number.  Presumably harmless because the VRSAVE check
     below only needs one side to match -- verify against the vrsave
     patterns in rs6000.md.  */
  src_regno  = REGNO (SET_SRC (XVECEXP (op, 0, 0)));

  /* One side of the first SET must involve VRSAVE.  */
  if (dest_regno != VRSAVE_REGNO
      && src_regno != VRSAVE_REGNO)
    return 0;

  /* The remaining elements may only be SETs or CLOBBERs.  */
  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != CLOBBER
	  && GET_CODE (elt) != SET)
	return 0;
    }

  return 1;
}
6746
/* Return 1 for an PARALLEL suitable for mtcrf.  */

int
mtcrf_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  int i;
  rtx src_reg;

  /* Perform a quick check so we don't blow up below.  */
  if (count < 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
    return 0;
  /* The GPR being moved into the CR fields, taken from the first
     element; every element must name this same register.  */
  src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);

  if (GET_CODE (src_reg) != REG
      || GET_MODE (src_reg) != SImode
      || ! INT_REGNO_P (REGNO (src_reg)))
    return 0;

  for (i = 0; i < count; i++)
    {
      rtx exp = XVECEXP (op, 0, i);
      rtx unspec;
      int maskval;

      /* Each element sets one CR field from the same source GPR.  */
      if (GET_CODE (exp) != SET
	  || GET_CODE (SET_DEST (exp)) != REG
	  || GET_MODE (SET_DEST (exp)) != CCmode
	  || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
	return 0;
      unspec = SET_SRC (exp);
      /* The single FXM mask bit that selects this CR field.  */
      maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));

      /* NOTE(review): 20 is presumably the UNSPEC number used by the
	 movesi-to-cr patterns in rs6000.md -- confirm; a named
	 constant would avoid the magic number.  */
      if (GET_CODE (unspec) != UNSPEC
	  || XINT (unspec, 1) != 20
	  || XVECLEN (unspec, 0) != 2
	  || XVECEXP (unspec, 0, 0) != src_reg
	  || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
	  || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
	return 0;
    }
  return 1;
}
6795
/* Return 1 for an PARALLEL suitable for lmw.  */

int
lmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* lmw always loads registers DEST_REGNO through 31, so the vector
     must cover exactly that range.  */
  if (dest_regno > 31
      || count != 32 - (int) dest_regno)
    return 0;

  /* The base address is either a bare register or reg+const.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (src_addr);
      /* r0 in a base-address position reads as the constant 0 on
	 PowerPC, so it cannot serve as the base here.  */
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
    {
      offset = INTVAL (XEXP (src_addr, 1));
      base_regno = REGNO (XEXP (src_addr, 0));
    }
  else
    return 0;

  /* Each element must load register DEST_REGNO+I from the word at
     OFFSET + 4*I off the same base register.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_SRC (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
6873
/* Return 1 for an PARALLEL suitable for stmw.  */

int
stmw_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  unsigned int base_regno;
  HOST_WIDE_INT offset;
  int i;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  /* stmw always stores registers SRC_REGNO through 31, so the vector
     must cover exactly that range.  */
  if (src_regno > 31
      || count != 32 - (int) src_regno)
    return 0;

  /* The base address is either a bare register or reg+const.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
    {
      offset = 0;
      base_regno = REGNO (dest_addr);
      /* r0 in a base-address position reads as the constant 0 on
	 PowerPC, so it cannot serve as the base here.  */
      if (base_regno == 0)
	return 0;
    }
  else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
    {
      offset = INTVAL (XEXP (dest_addr, 1));
      base_regno = REGNO (XEXP (dest_addr, 0));
    }
  else
    return 0;

  /* Each element must store register SRC_REGNO+I to the word at
     OFFSET + 4*I off the same base register.  */
  for (i = 0; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);
      rtx newaddr;
      rtx addr_reg;
      HOST_WIDE_INT newoffset;

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode)
	return 0;
      newaddr = XEXP (SET_DEST (elt), 0);
      if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
	{
	  newoffset = 0;
	  addr_reg = newaddr;
	}
      else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
	{
	  addr_reg = XEXP (newaddr, 0);
	  newoffset = INTVAL (XEXP (newaddr, 1));
	}
      else
	return 0;
      if (REGNO (addr_reg) != base_regno
	  || newoffset != offset + 4 * i)
	return 0;
    }

  return 1;
}
6951 \f
6952 /* A validation routine: say whether CODE, a condition code, and MODE
6953 match. The other alternatives either don't make sense or should
6954 never be generated. */
6955
6956 static void
6957 validate_condition_mode (code, mode)
6958 enum rtx_code code;
6959 enum machine_mode mode;
6960 {
6961 if (GET_RTX_CLASS (code) != '<'
6962 || GET_MODE_CLASS (mode) != MODE_CC)
6963 abort ();
6964
6965 /* These don't make sense. */
6966 if ((code == GT || code == LT || code == GE || code == LE)
6967 && mode == CCUNSmode)
6968 abort ();
6969
6970 if ((code == GTU || code == LTU || code == GEU || code == LEU)
6971 && mode != CCUNSmode)
6972 abort ();
6973
6974 if (mode != CCFPmode
6975 && (code == ORDERED || code == UNORDERED
6976 || code == UNEQ || code == LTGT
6977 || code == UNGT || code == UNLT
6978 || code == UNGE || code == UNLE))
6979 abort ();
6980
6981 /* These should never be generated except for
6982 flag_unsafe_math_optimizations and flag_finite_math_only. */
6983 if (mode == CCFPmode
6984 && ! flag_unsafe_math_optimizations
6985 && ! flag_finite_math_only
6986 && (code == LE || code == GE
6987 || code == UNEQ || code == LTGT
6988 || code == UNGT || code == UNLT))
6989 abort ();
6990
6991 /* These are invalid; the information is not there. */
6992 if (mode == CCEQmode
6993 && code != EQ && code != NE)
6994 abort ();
6995 }
6996
6997 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
6998 We only check the opcode against the mode of the CC value here. */
6999
7000 int
7001 branch_comparison_operator (op, mode)
7002 rtx op;
7003 enum machine_mode mode ATTRIBUTE_UNUSED;
7004 {
7005 enum rtx_code code = GET_CODE (op);
7006 enum machine_mode cc_mode;
7007
7008 if (GET_RTX_CLASS (code) != '<')
7009 return 0;
7010
7011 cc_mode = GET_MODE (XEXP (op, 0));
7012 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7013 return 0;
7014
7015 validate_condition_mode (code, cc_mode);
7016
7017 return 1;
7018 }
7019
7020 /* Return 1 if OP is a comparison operation that is valid for a branch
7021 insn and which is true if the corresponding bit in the CC register
7022 is set. */
7023
7024 int
7025 branch_positive_comparison_operator (op, mode)
7026 rtx op;
7027 enum machine_mode mode;
7028 {
7029 enum rtx_code code;
7030
7031 if (! branch_comparison_operator (op, mode))
7032 return 0;
7033
7034 code = GET_CODE (op);
7035 return (code == EQ || code == LT || code == GT
7036 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
7037 || code == LTU || code == GTU
7038 || code == UNORDERED);
7039 }
7040
7041 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
7042 We check the opcode against the mode of the CC value and disallow EQ or
7043 NE comparisons for integers. */
7044
7045 int
7046 scc_comparison_operator (op, mode)
7047 rtx op;
7048 enum machine_mode mode;
7049 {
7050 enum rtx_code code = GET_CODE (op);
7051 enum machine_mode cc_mode;
7052
7053 if (GET_MODE (op) != mode && mode != VOIDmode)
7054 return 0;
7055
7056 if (GET_RTX_CLASS (code) != '<')
7057 return 0;
7058
7059 cc_mode = GET_MODE (XEXP (op, 0));
7060 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7061 return 0;
7062
7063 validate_condition_mode (code, cc_mode);
7064
7065 if (code == NE && cc_mode != CCFPmode)
7066 return 0;
7067
7068 return 1;
7069 }
7070
7071 int
7072 trap_comparison_operator (op, mode)
7073 rtx op;
7074 enum machine_mode mode;
7075 {
7076 if (mode != VOIDmode && mode != GET_MODE (op))
7077 return 0;
7078 return GET_RTX_CLASS (GET_CODE (op)) == '<';
7079 }
7080
7081 int
7082 boolean_operator (op, mode)
7083 rtx op;
7084 enum machine_mode mode ATTRIBUTE_UNUSED;
7085 {
7086 enum rtx_code code = GET_CODE (op);
7087 return (code == AND || code == IOR || code == XOR);
7088 }
7089
7090 int
7091 boolean_or_operator (op, mode)
7092 rtx op;
7093 enum machine_mode mode ATTRIBUTE_UNUSED;
7094 {
7095 enum rtx_code code = GET_CODE (op);
7096 return (code == IOR || code == XOR);
7097 }
7098
7099 int
7100 min_max_operator (op, mode)
7101 rtx op;
7102 enum machine_mode mode ATTRIBUTE_UNUSED;
7103 {
7104 enum rtx_code code = GET_CODE (op);
7105 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
7106 }
7107 \f
7108 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7109 mask required to convert the result of a rotate insn into a shift
7110 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
7111
7112 int
7113 includes_lshift_p (shiftop, andop)
7114 rtx shiftop;
7115 rtx andop;
7116 {
7117 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7118
7119 shift_mask <<= INTVAL (shiftop);
7120
7121 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7122 }
7123
7124 /* Similar, but for right shift. */
7125
7126 int
7127 includes_rshift_p (shiftop, andop)
7128 rtx shiftop;
7129 rtx andop;
7130 {
7131 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7132
7133 shift_mask >>= INTVAL (shiftop);
7134
7135 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7136 }
7137
/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
   to perform a left shift.  It must have exactly SHIFTOP least
   significant 0's, then one or more 1's, then zero or more 0's.  */

int
includes_rldic_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      /* All-zero and all-one masks don't have the required 0-1-0
	 transition shape.  */
      if (c == 0 || c == ~0)
	return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* A 64-bit constant split across two host words; HIGH is only
	 meaningful (and only read) when the host word is narrower
	 than 64 bits.  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      /* Reject all-zero and all-one 64-bit masks.  */
      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The mask lives entirely in the high word; apply the same
	     LSB/transition test there with the shift count adjusted
	     down by 32.  */
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The run of ones extends into the high word; it must end
	     with a single 0-to-1 transition there too.  */
	  lsb = high & -high;
	  return high == -lsb;
	}

      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
7232
/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
   to perform a left shift.  It must have SHIFTOP or more least
   significant 0's, with the remainder of the word 1's.  */

int
includes_rldicr_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
	 This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* A 64-bit constant split across two host words.  */
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  if (low == 0)
	    {
	      /* The mask lives entirely in the high word; test it
		 there with the shift count adjusted down by 32.  */
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }
	  /* With any low bits set, the high word must be all ones.  */
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if ((lsb & shift_mask_low) == 0)
	return 0;

      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
7304
7305 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7306 for lfq and stfq insns.
7307
7308 Note reg1 and reg2 *must* be hard registers. To be sure we will
7309 abort if we are passed pseudo registers. */
7310
7311 int
7312 registers_ok_for_quad_peep (reg1, reg2)
7313 rtx reg1, reg2;
7314 {
7315 /* We might have been passed a SUBREG. */
7316 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7317 return 0;
7318
7319 return (REGNO (reg1) == REGNO (reg2) - 1);
7320 }
7321
/* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
   addr1 and addr2 must be in consecutive memory locations
   (addr2 == addr1 + 8).  */

int
addrs_ok_for_quad_peep (addr1, addr2)
     rtx addr1;
     rtx addr2;
{
  unsigned int reg1;
  int offset1;

  /* Extract an offset (if used) from the first addr.  */
  if (GET_CODE (addr1) == PLUS)
    {
      /* If not a REG, return zero.  */
      if (GET_CODE (XEXP (addr1, 0)) != REG)
	return 0;
      else
	{
	  reg1 = REGNO (XEXP (addr1, 0));
	  /* The offset must be constant! */
	  if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
	    return 0;
	  offset1 = INTVAL (XEXP (addr1, 1));
	}
    }
  else if (GET_CODE (addr1) != REG)
    return 0;
  else
    {
      reg1 = REGNO (addr1);
      /* This was a simple (mem (reg)) expression.  Offset is 0.  */
      offset1 = 0;
    }

  /* Make sure the second address is a (mem (plus (reg) (const_int)))
     or if it is (mem (reg)) then make sure that offset1 is -8 and the same
     register as addr1.  */
  if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
    return 1;
  if (GET_CODE (addr2) != PLUS)
    return 0;

  if (GET_CODE (XEXP (addr2, 0)) != REG
      || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
    return 0;

  /* Both addresses must use the same base register.  */
  if (reg1 != REGNO (XEXP (addr2, 0)))
    return 0;

  /* The offset for the second addr must be 8 more than the first addr.  */
  if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
    return 0;

  /* All the tests passed.  addr1 and addr2 are valid for lfq or stfq
     instructions.  */
  return 1;
}
7381 \f
7382 /* Return the register class of a scratch register needed to copy IN into
7383 or out of a register in CLASS in MODE. If it can be done directly,
7384 NO_REGS is returned. */
7385
enum reg_class
secondary_reload_class (class, mode, in)
     enum reg_class class;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx in;
{
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
#if TARGET_MACHO
		     && MACHOPIC_INDIRECT
#endif
		     ))
    {
      /* We cannot copy a symbolic operand directly into anything
	 other than BASE_REGS for TARGET_ELF.  So indicate that a
	 register from BASE_REGS is needed as an intermediate
	 register.

	 On Darwin, pic addresses require a load from memory, which
	 needs a base register.  */
      if (class != BASE_REGS
	  && (GET_CODE (in) == SYMBOL_REF
	      || GET_CODE (in) == HIGH
	      || GET_CODE (in) == LABEL_REF
	      || GET_CODE (in) == CONST))
	return BASE_REGS;
    }

  /* Resolve IN to a hard register number, or -1 when it is not (or
     does not resolve to) a hard register.  */
  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  /* A pseudo: see whether reload has already assigned it a
	     hard register.  */
	  regno = true_regnum (in);
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = -1;
	}
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = -1;
    }
  else
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}
7458 \f
7459 /* Given a comparison operation, return the bit number in CCR to test. We
7460 know this is a valid comparison.
7461
7462 SCC_P is 1 if this is for an scc. That means that %D will have been
7463 used instead of %C, so the bits will be in different places.
7464
7465 Return -1 if OP isn't a valid comparison for some reason. */
7466
7467 int
7468 ccr_bit (op, scc_p)
7469 rtx op;
7470 int scc_p;
7471 {
7472 enum rtx_code code = GET_CODE (op);
7473 enum machine_mode cc_mode;
7474 int cc_regnum;
7475 int base_bit;
7476 rtx reg;
7477
7478 if (GET_RTX_CLASS (code) != '<')
7479 return -1;
7480
7481 reg = XEXP (op, 0);
7482
7483 if (GET_CODE (reg) != REG
7484 || ! CR_REGNO_P (REGNO (reg)))
7485 abort ();
7486
7487 cc_mode = GET_MODE (reg);
7488 cc_regnum = REGNO (reg);
7489 base_bit = 4 * (cc_regnum - CR0_REGNO);
7490
7491 validate_condition_mode (code, cc_mode);
7492
7493 switch (code)
7494 {
7495 case NE:
7496 if (TARGET_E500 && !TARGET_FPRS
7497 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7498 return base_bit + 1;
7499 return scc_p ? base_bit + 3 : base_bit + 2;
7500 case EQ:
7501 if (TARGET_E500 && !TARGET_FPRS
7502 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7503 return base_bit + 1;
7504 return base_bit + 2;
7505 case GT: case GTU: case UNLE:
7506 return base_bit + 1;
7507 case LT: case LTU: case UNGE:
7508 return base_bit;
7509 case ORDERED: case UNORDERED:
7510 return base_bit + 3;
7511
7512 case GE: case GEU:
7513 /* If scc, we will have done a cror to put the bit in the
7514 unordered position. So test that bit. For integer, this is ! LT
7515 unless this is an scc insn. */
7516 return scc_p ? base_bit + 3 : base_bit;
7517
7518 case LE: case LEU:
7519 return scc_p ? base_bit + 3 : base_bit + 1;
7520
7521 default:
7522 abort ();
7523 }
7524 }
7525 \f
7526 /* Return the GOT register. */
7527
7528 struct rtx_def *
7529 rs6000_got_register (value)
7530 rtx value ATTRIBUTE_UNUSED;
7531 {
7532 /* The second flow pass currently (June 1999) can't update
7533 regs_ever_live without disturbing other parts of the compiler, so
7534 update it here to make the prolog/epilogue code happy. */
7535 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
7536 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
7537
7538 current_function_uses_pic_offset_table = 1;
7539
7540 return pic_offset_table_rtx;
7541 }
7542 \f
7543 /* Function to init struct machine_function.
7544 This will be called, via a pointer variable,
7545 from push_function_context. */
7546
7547 static struct machine_function *
7548 rs6000_init_machine_status ()
7549 {
7550 return ggc_alloc_cleared (sizeof (machine_function));
7551 }
7552 \f
7553 /* These macros test for integers and extract the low-order bits. */
7554 #define INT_P(X) \
7555 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7556 && GET_MODE (X) == VOIDmode)
7557
7558 #define INT_LOWPART(X) \
7559 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7560
7561 int
7562 extract_MB (op)
7563 rtx op;
7564 {
7565 int i;
7566 unsigned long val = INT_LOWPART (op);
7567
7568 /* If the high bit is zero, the value is the first 1 bit we find
7569 from the left. */
7570 if ((val & 0x80000000) == 0)
7571 {
7572 if ((val & 0xffffffff) == 0)
7573 abort ();
7574
7575 i = 1;
7576 while (((val <<= 1) & 0x80000000) == 0)
7577 ++i;
7578 return i;
7579 }
7580
7581 /* If the high bit is set and the low bit is not, or the mask is all
7582 1's, the value is zero. */
7583 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
7584 return 0;
7585
7586 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7587 from the right. */
7588 i = 31;
7589 while (((val >>= 1) & 1) != 0)
7590 --i;
7591
7592 return i;
7593 }
7594
7595 int
7596 extract_ME (op)
7597 rtx op;
7598 {
7599 int i;
7600 unsigned long val = INT_LOWPART (op);
7601
7602 /* If the low bit is zero, the value is the first 1 bit we find from
7603 the right. */
7604 if ((val & 1) == 0)
7605 {
7606 if ((val & 0xffffffff) == 0)
7607 abort ();
7608
7609 i = 30;
7610 while (((val >>= 1) & 1) == 0)
7611 --i;
7612
7613 return i;
7614 }
7615
7616 /* If the low bit is set and the high bit is not, or the mask is all
7617 1's, the value is 31. */
7618 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
7619 return 31;
7620
7621 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
7622 from the left. */
7623 i = 0;
7624 while (((val <<= 1) & 0x80000000) != 0)
7625 ++i;
7626
7627 return i;
7628 }
7629
7630 /* Print an operand. Recognize special options, documented below. */
7631
7632 #if TARGET_ELF
7633 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
7634 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
7635 #else
7636 #define SMALL_DATA_RELOC "sda21"
7637 #define SMALL_DATA_REG 0
7638 #endif
7639
void
print_operand (file, x, code)
     FILE *file;
     rtx x;
     int code;
{
  int i;
  HOST_WIDE_INT val;
  unsigned HOST_WIDE_INT uval;

  /* Dispatch on the operand-modifier letter; CODE 0 is the plain
     operand.  Each case writes directly to FILE and returns.  */
  switch (code)
    {
    case '.':
      /* Write out an instruction after the call which may be replaced
	 with glue code by the loader.  This depends on the AIX version.  */
      asm_fprintf (file, RS6000_CALL_GLUE);
      return;

      /* %a is output_address.  */

    case 'A':
      /* If X is a constant integer whose low-order 5 bits are zero,
	 write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
	 in the AIX assembler where "sri" with a zero shift count
	 writes a trash instruction.  */
      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
	putc ('l', file);
      else
	putc ('r', file);
      return;

    case 'b':
      /* If constant, low-order 16 bits of constant, unsigned.
	 Otherwise, write normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
      else
	print_operand (file, x, 0);
      return;

    case 'B':
      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
	 for 64-bit mask direction.  */
      putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
      return;

      /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
	 output_operand.  */

    case 'D':
      /* There used to be a comment for 'C' reading "This is an
	 optional cror needed for certain floating-point
	 comparisons.  Otherwise write nothing."  */

      /* Similar, except that this is for an scc, so we must be able to
	 encode the test in a single bit that is one.  We do the above
	 for any LE, GE, GEU, or LEU and invert the bit for NE.  */
      if (GET_CODE (x) == LE || GET_CODE (x) == GE
	  || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
	{
	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);

	  fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
		   base_bit + 2,
		   base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
	}

      else if (GET_CODE (x) == NE)
	{
	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);

	  fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
		   base_bit + 2, base_bit + 2);
	}
      else if (TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT
	       && GET_CODE (x) == EQ
	       && GET_MODE (XEXP (x, 0)) == CCFPmode)
	{
	  /* E500 soft-FPR FP compares keep their result in bit 1 of
	     the CR field.  */
	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);

	  fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 1,
		   base_bit + 1, base_bit + 1);
	}
      return;

    case 'E':
      /* X is a CR register.  Print the number of the EQ bit of the CR */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%E value");
      else
	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
      return;

    case 'f':
      /* X is a CR register.  Print the shift count needed to move it
	 to the high-order four bits.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%f value");
      else
	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
      return;

    case 'F':
      /* Similar, but print the count for the rotate in the opposite
	 direction.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%F value");
      else
	fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
      return;

    case 'G':
      /* X is a constant integer.  If it is negative, print "m",
	 otherwise print "z".  This is to make an aze or ame insn.  */
      if (GET_CODE (x) != CONST_INT)
	output_operand_lossage ("invalid %%G value");
      else if (INTVAL (x) >= 0)
	putc ('z', file);
      else
	putc ('m', file);
      return;

    case 'h':
      /* If constant, output low-order five bits.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
      else
	print_operand (file, x, 0);
      return;

    case 'H':
      /* If constant, output low-order six bits.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
      else
	print_operand (file, x, 0);
      return;

    case 'I':
      /* Print `i' if this is a constant, else nothing.  */
      if (INT_P (x))
	putc ('i', file);
      return;

    case 'j':
      /* Write the bit number in CCR for jump.  */
      i = ccr_bit (x, 0);
      if (i == -1)
	output_operand_lossage ("invalid %%j code");
      else
	fprintf (file, "%d", i);
      return;

    case 'J':
      /* Similar, but add one for shift count in rlinm for scc and pass
	 scc flag to `ccr_bit'.  */
      i = ccr_bit (x, 1);
      if (i == -1)
	output_operand_lossage ("invalid %%J code");
      else
	/* If we want bit 31, write a shift count of zero, not 32.  */
	fprintf (file, "%d", i == 31 ? 0 : i + 1);
      return;

    case 'k':
      /* X must be a constant.  Write the 1's complement of the
	 constant.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%k value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
      return;

    case 'K':
      /* X must be a symbolic constant on ELF.  Write an
	 expression suitable for an 'addi' that adds in the low 16
	 bits of the MEM.  */
      if (GET_CODE (x) != CONST)
	{
	  print_operand_address (file, x);
	  fputs ("@l", file);
	}
      else
	{
	  if (GET_CODE (XEXP (x, 0)) != PLUS
	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
	    output_operand_lossage ("invalid %%K value");
	  /* NOTE(review): execution continues after the lossage above
	     and still prints the operand -- presumably the lossage
	     renders the output moot; confirm this is intended.  */
	  print_operand_address (file, XEXP (XEXP (x, 0), 0));
	  fputs ("@l", file);
	  /* For GNU as, there must be a non-alphanumeric character
	     between 'l' and the number.  The '-' is added by
	     print_operand() already.  */
	  if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
	    fputs ("+", file);
	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
	}
      return;

      /* %l is output_asm_label.  */

    case 'L':
      /* Write second word of DImode or DFmode reference.  Works on register
	 or non-indexed memory only.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 1]);
      else if (GET_CODE (x) == MEM)
	{
	  /* Handle possible auto-increment.  Since it is pre-increment and
	     we have already done it, we can just use an offset of word.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
					   UNITS_PER_WORD));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode,
						     UNITS_PER_WORD),
				  0));

	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

    case 'm':
      /* MB value for a mask operand.  */
      if (! mask_operand (x, SImode))
	output_operand_lossage ("invalid %%m value");
      /* NOTE(review): no early return after lossage; extract_MB is
	 still invoked on the invalid operand.  */

      fprintf (file, "%d", extract_MB (x));
      return;

    case 'M':
      /* ME value for a mask operand.  */
      if (! mask_operand (x, SImode))
	output_operand_lossage ("invalid %%M value");

      fprintf (file, "%d", extract_ME (x));
      return;

      /* %n outputs the negative of its operand.  */

    case 'N':
      /* Write the number of elements in the vector times 4.  */
      if (GET_CODE (x) != PARALLEL)
	output_operand_lossage ("invalid %%N value");
      else
	fprintf (file, "%d", XVECLEN (x, 0) * 4);
      return;

    case 'O':
      /* Similar, but subtract 1 first.  */
      if (GET_CODE (x) != PARALLEL)
	output_operand_lossage ("invalid %%O value");
      else
	fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
      return;

    case 'p':
      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
      if (! INT_P (x)
	  || INT_LOWPART (x) < 0
	  || (i = exact_log2 (INT_LOWPART (x))) < 0)
	output_operand_lossage ("invalid %%p value");
      else
	fprintf (file, "%d", i);
      return;

    case 'P':
      /* The operand must be an indirect memory reference.  The result
	 is the register number.  */
      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
	  || REGNO (XEXP (x, 0)) >= 32)
	output_operand_lossage ("invalid %%P value");
      else
	fprintf (file, "%d", REGNO (XEXP (x, 0)));
      return;

    case 'q':
      /* This outputs the logical code corresponding to a boolean
	 expression.  The expression may have one or both operands
	 negated (if one, only the first one).  For condition register
	 logical operations, it will also treat the negated
	 CR codes as NOTs, but not handle NOTs of them.  */
      {
	const char *const *t = 0;
	const char *s;
	enum rtx_code code = GET_CODE (x);
	/* Rows: AND / IOR / XOR; columns: no operand negated, first
	   operand negated, both operands negated.  */
	static const char * const tbl[3][3] = {
	  { "and", "andc", "nor" },
	  { "or", "orc", "nand" },
	  { "xor", "eqv", "xor" } };

	if (code == AND)
	  t = tbl[0];
	else if (code == IOR)
	  t = tbl[1];
	else if (code == XOR)
	  t = tbl[2];
	else
	  output_operand_lossage ("invalid %%q value");

	if (GET_CODE (XEXP (x, 0)) != NOT)
	  s = t[0];
	else
	  {
	    if (GET_CODE (XEXP (x, 1)) == NOT)
	      s = t[2];
	    else
	      s = t[1];
	  }

	fputs (s, file);
      }
      return;

    case 'R':
      /* X is a CR register.  Print the mask for `mtcrf'.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%R value");
      else
	fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
      return;

    case 's':
      /* Low 5 bits of 32 - value */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%s value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
      return;

    case 'S':
      /* PowerPC64 mask position.  All 0's is excluded.
	 CONST_INT 32-bit mask is considered sign-extended so any
	 transition must occur within the CONST_INT, not on the boundary.  */
      if (! mask64_operand (x, DImode))
	output_operand_lossage ("invalid %%S value");
      /* NOTE(review): as with %m, the bit search below still runs
	 after a lossage.  */

      uval = INT_LOWPART (x);

      if (uval & 1)	/* Clear Left */
	{
#if HOST_BITS_PER_WIDE_INT > 64
	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
#endif
	  i = 64;
	}
      else		/* Clear Right */
	{
	  uval = ~uval;
#if HOST_BITS_PER_WIDE_INT > 64
	  uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
#endif
	  i = 63;
	}
      while (uval != 0)
	--i, uval >>= 1;
      if (i < 0)
	abort ();
      fprintf (file, "%d", i);
      return;

    case 't':
      /* Like 'J' but get to the OVERFLOW/UNORDERED bit.  */
      if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
	abort ();

      /* Bit 3 is OV bit.  */
      i = 4 * (REGNO (x) - CR0_REGNO) + 3;

      /* If we want bit 31, write a shift count of zero, not 32.  */
      fprintf (file, "%d", i == 31 ? 0 : i + 1);
      return;

    case 'T':
      /* Print the symbolic name of a branch target register.  */
      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
				  && REGNO (x) != COUNT_REGISTER_REGNUM))
	output_operand_lossage ("invalid %%T value");
      else if (REGNO (x) == LINK_REGISTER_REGNUM)
	fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
      else
	fputs ("ctr", file);
      return;

    case 'u':
      /* High-order 16 bits of constant for use in unsigned operand.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%u value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		 (INT_LOWPART (x) >> 16) & 0xffff);
      return;

    case 'v':
      /* High-order 16 bits of constant for use in signed operand.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%v value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		 (INT_LOWPART (x) >> 16) & 0xffff);
      return;

    case 'U':
      /* Print `u' if this has an auto-increment or auto-decrement.  */
      if (GET_CODE (x) == MEM
	  && (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC))
	putc ('u', file);
      return;

    case 'V':
      /* Print the trap code for this operand.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fputs ("eq", file);   /* 4 */
	  break;
	case NE:
	  fputs ("ne", file);   /* 24 */
	  break;
	case LT:
	  fputs ("lt", file);   /* 16 */
	  break;
	case LE:
	  fputs ("le", file);   /* 20 */
	  break;
	case GT:
	  fputs ("gt", file);   /* 8 */
	  break;
	case GE:
	  fputs ("ge", file);   /* 12 */
	  break;
	case LTU:
	  fputs ("llt", file);  /* 2 */
	  break;
	case LEU:
	  fputs ("lle", file);  /* 6 */
	  break;
	case GTU:
	  fputs ("lgt", file);  /* 1 */
	  break;
	case GEU:
	  fputs ("lge", file);  /* 5 */
	  break;
	default:
	  abort ();
	}
      break;

    case 'w':
      /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
      else
	print_operand (file, x, 0);
      return;

    case 'W':
      /* MB value for a PowerPC64 rldic operand.  */
      val = (GET_CODE (x) == CONST_INT
	     ? INTVAL (x) : CONST_DOUBLE_HIGH (x));

      if (val < 0)
	i = -1;
      else
	for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
	  if ((val <<= 1) < 0)
	    break;

#if HOST_BITS_PER_WIDE_INT == 32
      if (GET_CODE (x) == CONST_INT && i >= 0)
	i += 32;  /* zero-extend high-part was all 0's */
      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
	{
	  val = CONST_DOUBLE_LOW (x);

	  if (val == 0)
	    abort ();
	  else if (val < 0)
	    --i;
	  else
	    for ( ; i < 64; i++)
	      if ((val <<= 1) < 0)
		break;
	}
#endif

      fprintf (file, "%d", i + 1);
      return;

    case 'X':
      if (GET_CODE (x) == MEM
	  && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
	putc ('x', file);
      return;

    case 'Y':
      /* Like 'L', for third word of TImode  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 2]);
      else if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

    case 'z':
      /* X is a SYMBOL_REF.  Write out the name preceded by a
	 period and without any trailing data in brackets.  Used for function
	 names.  If we are configured for System V (or the embedded ABI) on
	 the PowerPC, do not emit the period, since those systems do not use
	 TOCs and the like.  */
      if (GET_CODE (x) != SYMBOL_REF)
	abort ();

      if (XSTR (x, 0)[0] != '.')
	{
	  switch (DEFAULT_ABI)
	    {
	    default:
	      abort ();

	    case ABI_AIX:
	      putc ('.', file);
	      break;

	    case ABI_V4:
	    case ABI_AIX_NODESC:
	    case ABI_DARWIN:
	      break;
	    }
	}
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
#else
      assemble_name (file, XSTR (x, 0));
#endif
      return;

    case 'Z':
      /* Like 'L', for last word of TImode.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 3]);
      else if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

      /* Print AltiVec or SPE memory operand.  */
    case 'y':
      {
	rtx tmp;

	if (GET_CODE (x) != MEM)
	  abort ();

	tmp = XEXP (x, 0);

	if (TARGET_E500)
	  {
	    /* Handle [reg].  */
	    if (GET_CODE (tmp) == REG)
	      {
		fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
		break;
	      }
	    /* Handle [reg+UIMM].  */
	    else if (GET_CODE (tmp) == PLUS &&
		     GET_CODE (XEXP (tmp, 1)) == CONST_INT)
	      {
		/* Note: this local shadows the rtx parameter X; only
		   TMP is referenced from here on in this branch.  */
		int x;

		if (GET_CODE (XEXP (tmp, 0)) != REG)
		  abort ();

		x = INTVAL (XEXP (tmp, 1));
		fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
		break;
	      }

	    /* Fall through.  Must be [reg+reg].  */
	  }
	if (GET_CODE (tmp) == REG)
	  fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
	else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
	  {
	    if (REGNO (XEXP (tmp, 0)) == 0)
	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
		       reg_names[ REGNO (XEXP (tmp, 0)) ]);
	    else
	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
		       reg_names[ REGNO (XEXP (tmp, 1)) ]);
	  }
	else
	  abort ();
	break;
      }

    case 0:
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x)]);
      else if (GET_CODE (x) == MEM)
	{
	  /* We need to handle PRE_INC and PRE_DEC here, since we need to
	     know the width from the mode.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC)
	    fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
	  else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
	  else
	    output_address (XEXP (x, 0));
	}
      else
	output_addr_const (file, x);
      return;

    default:
      output_operand_lossage ("invalid %%xn code");
    }
}
8285 \f
8286 /* Print the address of an operand. */
8287
void
print_operand_address (file, x)
     FILE *file;
     rtx x;
{
  if (GET_CODE (x) == REG)
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      else if (TARGET_TOC)
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* Indexed address: print base,index.  If the first register is
	 r0 the operands are swapped, since r0 as a base means zero.  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* Base + displacement: print "offset(reg)".  */
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
      fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;

	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      /* For ELF, temporarily append "@toc" to the symbol name
		 as well; both mutations are undone below.  */
	      char *newname;

	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
8375 \f
8376 /* Target hook for assembling integer objects. The PowerPC version has
8377 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8378 is defined. It also needs to handle DI-mode objects on 64-bit
8379 targets. */
8380
static bool
rs6000_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section PARAMS ((void));
      /* Guards against re-entry while the fixup entry itself is being
	 emitted via output_addr_const below.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  return default_assemble_integer (x, size, aligned_p);
}
8439
8440 #ifdef HAVE_GAS_HIDDEN
8441 /* Emit an assembler directive to set symbol visibility for DECL to
8442 VISIBILITY_TYPE. */
8443
8444 static void
8445 rs6000_assemble_visibility (decl, vis)
8446 tree decl;
8447 int vis;
8448 {
8449 /* Functions need to have their entry point symbol visibility set as
8450 well as their descriptor symbol visibility. */
8451 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
8452 {
8453 static const char * const visibility_types[] = {
8454 NULL, "internal", "hidden", "protected"
8455 };
8456
8457 const char *name, *type;
8458
8459 name = ((* targetm.strip_name_encoding)
8460 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
8461 type = visibility_types[vis];
8462
8463 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
8464 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
8465 }
8466 else
8467 default_assemble_visibility (decl, vis);
8468 }
8469 #endif
8470 \f
8471 enum rtx_code
8472 rs6000_reverse_condition (mode, code)
8473 enum machine_mode mode;
8474 enum rtx_code code;
8475 {
8476 /* Reversal of FP compares takes care -- an ordered compare
8477 becomes an unordered compare and vice versa. */
8478 if (mode == CCFPmode && !flag_unsafe_math_optimizations)
8479 return reverse_condition_maybe_unordered (code);
8480 else
8481 return reverse_condition (code);
8482 }
8483
8484 /* Generate a compare for CODE. Return a brand-new rtx that
8485 represents the result of the compare. */
8486
8487 static rtx
8488 rs6000_generate_compare (code)
8489 enum rtx_code code;
8490 {
8491 enum machine_mode comp_mode;
8492 rtx compare_result;
8493
8494 if (rs6000_compare_fp_p)
8495 comp_mode = CCFPmode;
8496 else if (code == GTU || code == LTU
8497 || code == GEU || code == LEU)
8498 comp_mode = CCUNSmode;
8499 else
8500 comp_mode = CCmode;
8501
8502 /* First, the compare. */
8503 compare_result = gen_reg_rtx (comp_mode);
8504
8505 /* SPE FP compare instructions on the GPRs. Yuck! */
8506 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
8507 && rs6000_compare_fp_p)
8508 {
8509 rtx cmp, or1, or2, or_result, compare_result2;
8510
8511 switch (code)
8512 {
8513 case EQ:
8514 case UNEQ:
8515 case NE:
8516 case LTGT:
8517 cmp = flag_unsafe_math_optimizations
8518 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
8519 rs6000_compare_op1)
8520 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
8521 rs6000_compare_op1);
8522 break;
8523 case GT:
8524 case GTU:
8525 case UNGT:
8526 case UNGE:
8527 case GE:
8528 case GEU:
8529 cmp = flag_unsafe_math_optimizations
8530 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
8531 rs6000_compare_op1)
8532 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
8533 rs6000_compare_op1);
8534 break;
8535 case LT:
8536 case LTU:
8537 case UNLT:
8538 case UNLE:
8539 case LE:
8540 case LEU:
8541 cmp = flag_unsafe_math_optimizations
8542 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
8543 rs6000_compare_op1)
8544 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
8545 rs6000_compare_op1);
8546 break;
8547 default:
8548 abort ();
8549 }
8550
8551 /* Synthesize LE and GE from LT/GT || EQ. */
8552 if (code == LE || code == GE || code == LEU || code == GEU)
8553 {
8554 /* Synthesize GE/LE frome GT/LT || EQ. */
8555
8556 emit_insn (cmp);
8557
8558 switch (code)
8559 {
8560 case LE: code = LT; break;
8561 case GE: code = GT; break;
8562 case LEU: code = LT; break;
8563 case GEU: code = GT; break;
8564 default: abort ();
8565 }
8566
8567 or1 = gen_reg_rtx (SImode);
8568 or2 = gen_reg_rtx (SImode);
8569 or_result = gen_reg_rtx (CCEQmode);
8570 compare_result2 = gen_reg_rtx (CCFPmode);
8571
8572 /* Do the EQ. */
8573 cmp = flag_unsafe_math_optimizations
8574 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
8575 rs6000_compare_op1)
8576 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
8577 rs6000_compare_op1);
8578 emit_insn (cmp);
8579
8580 /* The MC8540 FP compare instructions set the CR bits
8581 differently than other PPC compare instructions. For
8582 that matter, there is no generic test instruction, but a
8583 testgt, testlt, and testeq. For a true condition, bit 2
8584 is set (x1xx) in the CR. Following the traditional CR
8585 values:
8586
8587 LT GT EQ OV
8588 bit3 bit2 bit1 bit0
8589
8590 ... bit 2 would be a GT CR alias, so later on we
8591 look in the GT bits for the branch instructions.
8592 However, we must be careful to emit correct RTL in
8593 the meantime, so optimizations don't get confused. */
8594
8595 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
8596 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
8597
8598 /* OR them together. */
8599 cmp = gen_rtx_SET (VOIDmode, or_result,
8600 gen_rtx_COMPARE (CCEQmode,
8601 gen_rtx_IOR (SImode, or1, or2),
8602 const_true_rtx));
8603 compare_result = or_result;
8604 code = EQ;
8605 }
8606 else
8607 {
8608 /* We only care about 1 bit (x1xx), so map everything to NE to
8609 maintain rtl sanity. We'll get to the right bit (x1xx) at
8610 code output time. */
8611 if (code == NE || code == LTGT)
8612 /* Do the inverse here because we have no cmpne
8613 instruction. We use the cmpeq instruction and expect
8614 to get a 0 instead. */
8615 code = EQ;
8616 else
8617 code = NE;
8618 }
8619
8620 emit_insn (cmp);
8621 }
8622 else
8623 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
8624 gen_rtx_COMPARE (comp_mode,
8625 rs6000_compare_op0,
8626 rs6000_compare_op1)));
8627
8628 /* Some kinds of FP comparisons need an OR operation;
8629 except for flag_unsafe_math_optimizations we don't bother. */
8630 if (rs6000_compare_fp_p
8631 && ! flag_unsafe_math_optimizations
8632 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
8633 && (code == LE || code == GE
8634 || code == UNEQ || code == LTGT
8635 || code == UNGT || code == UNLT))
8636 {
8637 enum rtx_code or1, or2;
8638 rtx or1_rtx, or2_rtx, compare2_rtx;
8639 rtx or_result = gen_reg_rtx (CCEQmode);
8640
8641 switch (code)
8642 {
8643 case LE: or1 = LT; or2 = EQ; break;
8644 case GE: or1 = GT; or2 = EQ; break;
8645 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
8646 case LTGT: or1 = LT; or2 = GT; break;
8647 case UNGT: or1 = UNORDERED; or2 = GT; break;
8648 case UNLT: or1 = UNORDERED; or2 = LT; break;
8649 default: abort ();
8650 }
8651 validate_condition_mode (or1, comp_mode);
8652 validate_condition_mode (or2, comp_mode);
8653 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
8654 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
8655 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
8656 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
8657 const_true_rtx);
8658 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
8659
8660 compare_result = or_result;
8661 code = EQ;
8662 }
8663
8664 validate_condition_mode (code, GET_MODE (compare_result));
8665
8666 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
8667 }
8668
8669
8670 /* Emit the RTL for an sCOND pattern. */
8671
8672 void
8673 rs6000_emit_sCOND (code, result)
8674 enum rtx_code code;
8675 rtx result;
8676 {
8677 rtx condition_rtx;
8678 enum machine_mode op_mode;
8679
8680 condition_rtx = rs6000_generate_compare (code);
8681
8682 op_mode = GET_MODE (rs6000_compare_op0);
8683 if (op_mode == VOIDmode)
8684 op_mode = GET_MODE (rs6000_compare_op1);
8685
8686 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
8687 {
8688 PUT_MODE (condition_rtx, DImode);
8689 convert_move (result, condition_rtx, 0);
8690 }
8691 else
8692 {
8693 PUT_MODE (condition_rtx, SImode);
8694 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
8695 }
8696 }
8697
8698 /* Emit a branch of kind CODE to location LOC. */
8699
8700 void
8701 rs6000_emit_cbranch (code, loc)
8702 enum rtx_code code;
8703 rtx loc;
8704 {
8705 rtx condition_rtx, loc_ref;
8706
8707 condition_rtx = rs6000_generate_compare (code);
8708 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
8709 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
8710 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
8711 loc_ref, pc_rtx)));
8712 }
8713
/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, or -1 if the branch is really a
   conditional return.

   OP is the conditional expression.  XEXP (OP, 0) is assumed to be a
   condition code register and its mode specifies what kind of
   comparison we made.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.

   The result lives in a static buffer and is only valid until the
   next call.  */

char *
output_cbranch (op, label, reversed, insn)
     rtx op;
     const char * label;
     int reversed;
     rtx insn;
{
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* Length 8 means the target is out of conditional-branch range and
     we must branch around an unconditional jump (see the tail of this
     function).  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    {
      /* Reversal of FP compares takes care -- an ordered compare
	 becomes an unordered compare and vice versa.  */
      if (mode == CCFPmode)
	code = reverse_condition_maybe_unordered (code);
      else
	code = reverse_condition (code);
    }

  if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
    {
      /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
	 to the GT bit.  */
      if (code == EQ)
	/* Opposite of GT.  */
	code = UNLE;
      else if (code == NE)
	code = GT;
      else
	abort ();
    }

  /* Map CODE to the two-letter condition suffix of the branch
     mnemonic.  */
  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU:
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      abort ();
    }

  /* Maybe we have a guess as to how likely the branch is.
     The old mnemonics don't have a way to specify this information.  */
  pred = "";
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
      bool always_hint = rs6000_cpu != PROCESSOR_POWER4;

      /* Only hint for highly probable/improbable branches on newer
	 cpus as static prediction overrides processor dynamic
	 prediction.  For older cpus we may as well always hint, but
	 assume not taken for branches that are very close to 50% as a
	 mispredicted taken branch is more expensive than a
	 mispredicted not-taken branch.  */
      if (always_hint
	  || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
	{
	  /* A longbranch inverts the sense of the hint, hence the XOR
	     with need_longbranch.  */
	  if (abs (prob) > REG_BR_PROB_BASE / 20
	      && ((prob > 0) ^ need_longbranch))
	    pred = "+";
	  else
	    pred = "-";
	}
    }

  /* Emit the mnemonic.  The "{a|b}" construct selects between two
     assembler dialects -- presumably old vs. new mnemonics; confirm
     against the target's ASSEMBLER_DIALECT handling.  A NULL label
     means a conditional return (branch to the link register).  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character...  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
8847
8848 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
8849 operands of the last comparison is nonzero/true, FALSE_COND if it
8850 is zero/false. Return 0 if the hardware has no such operation. */
8851
8852 int
8853 rs6000_emit_cmove (dest, op, true_cond, false_cond)
8854 rtx dest;
8855 rtx op;
8856 rtx true_cond;
8857 rtx false_cond;
8858 {
8859 enum rtx_code code = GET_CODE (op);
8860 rtx op0 = rs6000_compare_op0;
8861 rtx op1 = rs6000_compare_op1;
8862 REAL_VALUE_TYPE c1;
8863 enum machine_mode compare_mode = GET_MODE (op0);
8864 enum machine_mode result_mode = GET_MODE (dest);
8865 rtx temp;
8866
8867 /* These modes should always match. */
8868 if (GET_MODE (op1) != compare_mode
8869 /* In the isel case however, we can use a compare immediate, so
8870 op1 may be a small constant. */
8871 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
8872 return 0;
8873 if (GET_MODE (true_cond) != result_mode)
8874 return 0;
8875 if (GET_MODE (false_cond) != result_mode)
8876 return 0;
8877
8878 /* First, work out if the hardware can do this at all, or
8879 if it's too slow... */
8880 if (! rs6000_compare_fp_p)
8881 {
8882 if (TARGET_ISEL)
8883 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
8884 return 0;
8885 }
8886
8887 /* Eliminate half of the comparisons by switching operands, this
8888 makes the remaining code simpler. */
8889 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
8890 || code == LTGT || code == LT)
8891 {
8892 code = reverse_condition_maybe_unordered (code);
8893 temp = true_cond;
8894 true_cond = false_cond;
8895 false_cond = temp;
8896 }
8897
8898 /* UNEQ and LTGT take four instructions for a comparison with zero,
8899 it'll probably be faster to use a branch here too. */
8900 if (code == UNEQ)
8901 return 0;
8902
8903 if (GET_CODE (op1) == CONST_DOUBLE)
8904 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
8905
8906 /* We're going to try to implement comparisons by performing
8907 a subtract, then comparing against zero. Unfortunately,
8908 Inf - Inf is NaN which is not zero, and so if we don't
8909 know that the operand is finite and the comparison
8910 would treat EQ different to UNORDERED, we can't do it. */
8911 if (! flag_unsafe_math_optimizations
8912 && code != GT && code != UNGE
8913 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
8914 /* Constructs of the form (a OP b ? a : b) are safe. */
8915 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
8916 || (! rtx_equal_p (op0, true_cond)
8917 && ! rtx_equal_p (op1, true_cond))))
8918 return 0;
8919 /* At this point we know we can use fsel. */
8920
8921 /* Reduce the comparison to a comparison against zero. */
8922 temp = gen_reg_rtx (compare_mode);
8923 emit_insn (gen_rtx_SET (VOIDmode, temp,
8924 gen_rtx_MINUS (compare_mode, op0, op1)));
8925 op0 = temp;
8926 op1 = CONST0_RTX (compare_mode);
8927
8928 /* If we don't care about NaNs we can reduce some of the comparisons
8929 down to faster ones. */
8930 if (flag_unsafe_math_optimizations)
8931 switch (code)
8932 {
8933 case GT:
8934 code = LE;
8935 temp = true_cond;
8936 true_cond = false_cond;
8937 false_cond = temp;
8938 break;
8939 case UNGE:
8940 code = GE;
8941 break;
8942 case UNEQ:
8943 code = EQ;
8944 break;
8945 default:
8946 break;
8947 }
8948
8949 /* Now, reduce everything down to a GE. */
8950 switch (code)
8951 {
8952 case GE:
8953 break;
8954
8955 case LE:
8956 temp = gen_reg_rtx (compare_mode);
8957 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8958 op0 = temp;
8959 break;
8960
8961 case ORDERED:
8962 temp = gen_reg_rtx (compare_mode);
8963 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
8964 op0 = temp;
8965 break;
8966
8967 case EQ:
8968 temp = gen_reg_rtx (compare_mode);
8969 emit_insn (gen_rtx_SET (VOIDmode, temp,
8970 gen_rtx_NEG (compare_mode,
8971 gen_rtx_ABS (compare_mode, op0))));
8972 op0 = temp;
8973 break;
8974
8975 case UNGE:
8976 temp = gen_reg_rtx (result_mode);
8977 emit_insn (gen_rtx_SET (VOIDmode, temp,
8978 gen_rtx_IF_THEN_ELSE (result_mode,
8979 gen_rtx_GE (VOIDmode,
8980 op0, op1),
8981 true_cond, false_cond)));
8982 false_cond = temp;
8983 true_cond = false_cond;
8984
8985 temp = gen_reg_rtx (compare_mode);
8986 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
8987 op0 = temp;
8988 break;
8989
8990 case GT:
8991 temp = gen_reg_rtx (result_mode);
8992 emit_insn (gen_rtx_SET (VOIDmode, temp,
8993 gen_rtx_IF_THEN_ELSE (result_mode,
8994 gen_rtx_GE (VOIDmode,
8995 op0, op1),
8996 true_cond, false_cond)));
8997 true_cond = temp;
8998 false_cond = true_cond;
8999
9000 temp = gen_reg_rtx (compare_mode);
9001 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9002 op0 = temp;
9003 break;
9004
9005 default:
9006 abort ();
9007 }
9008
9009 emit_insn (gen_rtx_SET (VOIDmode, dest,
9010 gen_rtx_IF_THEN_ELSE (result_mode,
9011 gen_rtx_GE (VOIDmode,
9012 op0, op1),
9013 true_cond, false_cond)));
9014 return 1;
9015 }
9016
9017 /* Same as above, but for ints (isel). */
9018
9019 static int
9020 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
9021 rtx dest;
9022 rtx op;
9023 rtx true_cond;
9024 rtx false_cond;
9025 {
9026 rtx condition_rtx, cr;
9027
9028 /* All isel implementations thus far are 32-bits. */
9029 if (GET_MODE (rs6000_compare_op0) != SImode)
9030 return 0;
9031
9032 /* We still have to do the compare, because isel doesn't do a
9033 compare, it just looks at the CRx bits set by a previous compare
9034 instruction. */
9035 condition_rtx = rs6000_generate_compare (GET_CODE (op));
9036 cr = XEXP (condition_rtx, 0);
9037
9038 if (GET_MODE (cr) == CCmode)
9039 emit_insn (gen_isel_signed (dest, condition_rtx,
9040 true_cond, false_cond, cr));
9041 else
9042 emit_insn (gen_isel_unsigned (dest, condition_rtx,
9043 true_cond, false_cond, cr));
9044
9045 return 1;
9046 }
9047
9048 const char *
9049 output_isel (operands)
9050 rtx *operands;
9051 {
9052 enum rtx_code code;
9053
9054 code = GET_CODE (operands[1]);
9055 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
9056 {
9057 PUT_CODE (operands[1], reverse_condition (code));
9058 return "isel %0,%3,%2,%j1";
9059 }
9060 else
9061 return "isel %0,%2,%3,%j1";
9062 }
9063
9064 void
9065 rs6000_emit_minmax (dest, code, op0, op1)
9066 rtx dest;
9067 enum rtx_code code;
9068 rtx op0;
9069 rtx op1;
9070 {
9071 enum machine_mode mode = GET_MODE (op0);
9072 enum rtx_code c;
9073 rtx target;
9074
9075 if (code == SMAX || code == SMIN)
9076 c = GE;
9077 else
9078 c = GEU;
9079
9080 if (code == SMAX || code == UMAX)
9081 target = emit_conditional_move (dest, c, op0, op1, mode,
9082 op0, op1, mode, 0);
9083 else
9084 target = emit_conditional_move (dest, c, op0, op1, mode,
9085 op1, op0, mode, 0);
9086 if (target == NULL_RTX)
9087 abort ();
9088 if (target != dest)
9089 emit_move_insn (dest, target);
9090 }
9091 \f
9092 /* This page contains routines that are used to determine what the
9093 function prologue and epilogue code will do and write them out. */
9094
9095 /* Return the first fixed-point register that is required to be
9096 saved. 32 if none. */
9097
9098 int
9099 first_reg_to_save ()
9100 {
9101 int first_reg;
9102
9103 /* Find lowest numbered live register. */
9104 for (first_reg = 13; first_reg <= 31; first_reg++)
9105 if (regs_ever_live[first_reg]
9106 && (! call_used_regs[first_reg]
9107 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
9108 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
9109 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
9110 break;
9111
9112 #if TARGET_MACHO
9113 if (flag_pic
9114 && current_function_uses_pic_offset_table
9115 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
9116 return RS6000_PIC_OFFSET_TABLE_REGNUM;
9117 #endif
9118
9119 return first_reg;
9120 }
9121
9122 /* Similar, for FP regs. */
9123
9124 int
9125 first_fp_reg_to_save ()
9126 {
9127 int first_reg;
9128
9129 /* Find lowest numbered live register. */
9130 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
9131 if (regs_ever_live[first_reg])
9132 break;
9133
9134 return first_reg;
9135 }
9136
9137 /* Similar, for AltiVec regs. */
9138
9139 static int
9140 first_altivec_reg_to_save ()
9141 {
9142 int i;
9143
9144 /* Stack frame remains as is unless we are in AltiVec ABI. */
9145 if (! TARGET_ALTIVEC_ABI)
9146 return LAST_ALTIVEC_REGNO + 1;
9147
9148 /* Find lowest numbered live register. */
9149 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
9150 if (regs_ever_live[i])
9151 break;
9152
9153 return i;
9154 }
9155
/* Return a 32-bit mask of the AltiVec registers we need to set in
   VRSAVE.  Bit n of the return value is 1 if Vn is live.  The MSB in
   the 32-bit word is 0.  */

static unsigned int
compute_vrsave_mask ()
{
  unsigned int i, mask = 0;

  /* First, find out if we use _any_ altivec registers.  */
  for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
    if (regs_ever_live[i])
      mask |= ALTIVEC_REG_BIT (i);

  if (mask == 0)
    return mask;

  /* Next, remove the argument registers from the set.  These must
     be in the VRSAVE mask set by the caller, so we don't need to add
     them in again.  More importantly, the mask we compute here is
     used to generate CLOBBERs in the set_vrsave insn, and we do not
     wish the argument registers to die.  */
  /* NOTE(review): I is unsigned, so this countdown loop relies on
     cfun->args_info.vregno and ALTIVEC_ARG_MIN_REG being hard register
     numbers well above zero (no wraparound) -- confirm.  The loop also
     clears the bit for vregno itself, presumably the next *unused*
     argument register; harmless since it is only clearing bits.  */
  for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
    mask &= ~ALTIVEC_REG_BIT (i);

  /* Similarly, remove the return value from the set.  */
  {
    bool yes = false;
    diddle_return_value (is_altivec_return_reg, &yes);
    if (yes)
      mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
  }

  return mask;
}
9191
9192 static void
9193 is_altivec_return_reg (reg, xyes)
9194 rtx reg;
9195 void *xyes;
9196 {
9197 bool *yes = (bool *) xyes;
9198 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
9199 *yes = true;
9200 }
9201
9202 \f
9203 /* Calculate the stack information for the current function. This is
9204 complicated by having two separate calling sequences, the AIX calling
9205 sequence and the V.4 calling sequence.
9206
9207 AIX (and Darwin/Mac OS X) stack frames look like:
9208 32-bit 64-bit
9209 SP----> +---------------------------------------+
9210 | back chain to caller | 0 0
9211 +---------------------------------------+
9212 | saved CR | 4 8 (8-11)
9213 +---------------------------------------+
9214 | saved LR | 8 16
9215 +---------------------------------------+
9216 | reserved for compilers | 12 24
9217 +---------------------------------------+
9218 | reserved for binders | 16 32
9219 +---------------------------------------+
9220 | saved TOC pointer | 20 40
9221 +---------------------------------------+
9222 | Parameter save area (P) | 24 48
9223 +---------------------------------------+
9224 | Alloca space (A) | 24+P etc.
9225 +---------------------------------------+
9226 | Local variable space (L) | 24+P+A
9227 +---------------------------------------+
9228 | Float/int conversion temporary (X) | 24+P+A+L
9229 +---------------------------------------+
9230 | Save area for AltiVec registers (W) | 24+P+A+L+X
9231 +---------------------------------------+
9232 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9233 +---------------------------------------+
9234 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9235 +---------------------------------------+
	| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
	+---------------------------------------+
	| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
9239 +---------------------------------------+
9240 old SP->| back chain to caller's caller |
9241 +---------------------------------------+
9242
9243 The required alignment for AIX configurations is two words (i.e., 8
9244 or 16 bytes).
9245
9246
9247 V.4 stack frames look like:
9248
9249 SP----> +---------------------------------------+
9250 | back chain to caller | 0
9251 +---------------------------------------+
9252 | caller's saved LR | 4
9253 +---------------------------------------+
9254 | Parameter save area (P) | 8
9255 +---------------------------------------+
9256 | Alloca space (A) | 8+P
9257 +---------------------------------------+
9258 | Varargs save area (V) | 8+P+A
9259 +---------------------------------------+
9260 | Local variable space (L) | 8+P+A+V
9261 +---------------------------------------+
9262 | Float/int conversion temporary (X) | 8+P+A+V+L
9263 +---------------------------------------+
9264 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9265 +---------------------------------------+
9266 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9267 +---------------------------------------+
9268 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9269 +---------------------------------------+
9270 | SPE: area for 64-bit GP registers |
9271 +---------------------------------------+
9272 | SPE alignment padding |
9273 +---------------------------------------+
9274 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9275 +---------------------------------------+
9276 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9277 +---------------------------------------+
9278 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9279 +---------------------------------------+
9280 old SP->| back chain to caller's caller |
9281 +---------------------------------------+
9282
9283 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9284 given. (But note below and in sysv4.h that we require only 8 and
9285 may round up the size of our stack frame anyways. The historical
9286 reason is early versions of powerpc-linux which didn't properly
9287 align the stack at program startup. A happy side-effect is that
9288 -mno-eabi libraries can be used with -meabi programs.)
9289
9290 The EABI configuration defaults to the V.4 layout, unless
9291 -mcall-aix is used, in which case the AIX layout is used. However,
9292 the stack alignment requirements may differ. If -mno-eabi is not
9293 given, the required stack alignment is 8 bytes; if -mno-eabi is
9294 given, the required alignment is 16 bytes. (But see V.4 comment
9295 above.) */
9296
9297 #ifndef ABI_STACK_BOUNDARY
9298 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9299 #endif
9300
9301 rs6000_stack_t *
9302 rs6000_stack_info ()
9303 {
9304 static rs6000_stack_t info, zero_info;
9305 rs6000_stack_t *info_ptr = &info;
9306 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9307 int ehrd_size;
9308 int total_raw_size;
9309
9310 /* Zero all fields portably. */
9311 info = zero_info;
9312
9313 if (TARGET_SPE)
9314 {
9315 /* Cache value so we don't rescan instruction chain over and over. */
9316 if (cfun->machine->insn_chain_scanned_p == 0)
9317 {
9318 cfun->machine->insn_chain_scanned_p = 1;
9319 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
9320 }
9321 }
9322
9323 /* Select which calling sequence. */
9324 info_ptr->abi = DEFAULT_ABI;
9325
9326 /* Calculate which registers need to be saved & save area size. */
9327 info_ptr->first_gp_reg_save = first_reg_to_save ();
9328 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9329 even if it currently looks like we won't. */
9330 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
9331 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
9332 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
9333 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
9334 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
9335 else
9336 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
9337
9338 /* For the SPE, we have an additional upper 32-bits on each GPR.
9339 Ideally we should save the entire 64-bits only when the upper
9340 half is used in SIMD instructions. Since we only record
9341 registers live (not the size they are used in), this proves
9342 difficult because we'd have to traverse the instruction chain at
9343 the right time, taking reload into account. This is a real pain,
9344 so we opt to save the GPRs in 64-bits always if but one register
9345 gets used in 64-bits. Otherwise, all the registers in the frame
9346 get saved in 32-bits.
9347
9348 So... since when we save all GPRs (except the SP) in 64-bits, the
9349 traditional GP save area will be empty. */
9350 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9351 info_ptr->gp_size = 0;
9352
9353 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
9354 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
9355
9356 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
9357 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
9358 - info_ptr->first_altivec_reg_save);
9359
9360 /* Does this function call anything? */
9361 info_ptr->calls_p = (! current_function_is_leaf
9362 || cfun->machine->ra_needs_full_frame);
9363
9364 /* Determine if we need to save the link register. */
9365 if (rs6000_ra_ever_killed ()
9366 || (DEFAULT_ABI == ABI_AIX
9367 && current_function_profile
9368 && !TARGET_PROFILE_KERNEL)
9369 #ifdef TARGET_RELOCATABLE
9370 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
9371 #endif
9372 || (info_ptr->first_fp_reg_save != 64
9373 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
9374 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
9375 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
9376 || (DEFAULT_ABI == ABI_DARWIN
9377 && flag_pic
9378 && current_function_uses_pic_offset_table)
9379 || info_ptr->calls_p)
9380 {
9381 info_ptr->lr_save_p = 1;
9382 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
9383 }
9384
9385 /* Determine if we need to save the condition code registers. */
9386 if (regs_ever_live[CR2_REGNO]
9387 || regs_ever_live[CR3_REGNO]
9388 || regs_ever_live[CR4_REGNO])
9389 {
9390 info_ptr->cr_save_p = 1;
9391 if (DEFAULT_ABI == ABI_V4)
9392 info_ptr->cr_size = reg_size;
9393 }
9394
9395 /* If the current function calls __builtin_eh_return, then we need
9396 to allocate stack space for registers that will hold data for
9397 the exception handler. */
9398 if (current_function_calls_eh_return)
9399 {
9400 unsigned int i;
9401 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
9402 continue;
9403
9404 /* SPE saves EH registers in 64-bits. */
9405 ehrd_size = i * (TARGET_SPE_ABI
9406 && info_ptr->spe_64bit_regs_used != 0
9407 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
9408 }
9409 else
9410 ehrd_size = 0;
9411
9412 /* Determine various sizes. */
9413 info_ptr->reg_size = reg_size;
9414 info_ptr->fixed_size = RS6000_SAVE_AREA;
9415 info_ptr->varargs_size = RS6000_VARARGS_AREA;
9416 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
9417 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
9418 8);
9419
9420 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9421 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
9422 else
9423 info_ptr->spe_gp_size = 0;
9424
9425 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
9426 {
9427 info_ptr->vrsave_mask = compute_vrsave_mask ();
9428 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
9429 }
9430 else
9431 {
9432 info_ptr->vrsave_mask = 0;
9433 info_ptr->vrsave_size = 0;
9434 }
9435
9436 /* Calculate the offsets. */
9437 switch (DEFAULT_ABI)
9438 {
9439 case ABI_NONE:
9440 default:
9441 abort ();
9442
9443 case ABI_AIX:
9444 case ABI_AIX_NODESC:
9445 case ABI_DARWIN:
9446 info_ptr->fp_save_offset = - info_ptr->fp_size;
9447 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9448
9449 if (TARGET_ALTIVEC_ABI)
9450 {
9451 info_ptr->vrsave_save_offset
9452 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
9453
9454 /* Align stack so vector save area is on a quadword boundary. */
9455 if (info_ptr->altivec_size != 0)
9456 info_ptr->altivec_padding_size
9457 = 16 - (-info_ptr->vrsave_save_offset % 16);
9458 else
9459 info_ptr->altivec_padding_size = 0;
9460
9461 info_ptr->altivec_save_offset
9462 = info_ptr->vrsave_save_offset
9463 - info_ptr->altivec_padding_size
9464 - info_ptr->altivec_size;
9465
9466 /* Adjust for AltiVec case. */
9467 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
9468 }
9469 else
9470 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
9471 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
9472 info_ptr->lr_save_offset = 2*reg_size;
9473 break;
9474
9475 case ABI_V4:
9476 info_ptr->fp_save_offset = - info_ptr->fp_size;
9477 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9478 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
9479
9480 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9481 {
9482 /* Align stack so SPE GPR save area is aligned on a
9483 double-word boundary. */
9484 if (info_ptr->spe_gp_size != 0)
9485 info_ptr->spe_padding_size
9486 = 8 - (-info_ptr->cr_save_offset % 8);
9487 else
9488 info_ptr->spe_padding_size = 0;
9489
9490 info_ptr->spe_gp_save_offset
9491 = info_ptr->cr_save_offset
9492 - info_ptr->spe_padding_size
9493 - info_ptr->spe_gp_size;
9494
9495 /* Adjust for SPE case. */
9496 info_ptr->toc_save_offset
9497 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
9498 }
9499 else if (TARGET_ALTIVEC_ABI)
9500 {
9501 info_ptr->vrsave_save_offset
9502 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
9503
9504 /* Align stack so vector save area is on a quadword boundary. */
9505 if (info_ptr->altivec_size != 0)
9506 info_ptr->altivec_padding_size
9507 = 16 - (-info_ptr->vrsave_save_offset % 16);
9508 else
9509 info_ptr->altivec_padding_size = 0;
9510
9511 info_ptr->altivec_save_offset
9512 = info_ptr->vrsave_save_offset
9513 - info_ptr->altivec_padding_size
9514 - info_ptr->altivec_size;
9515
9516 /* Adjust for AltiVec case. */
9517 info_ptr->toc_save_offset
9518 = info_ptr->altivec_save_offset - info_ptr->toc_size;
9519 }
9520 else
9521 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
9522 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
9523 info_ptr->lr_save_offset = reg_size;
9524 break;
9525 }
9526
9527 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
9528 + info_ptr->gp_size
9529 + info_ptr->altivec_size
9530 + info_ptr->altivec_padding_size
9531 + info_ptr->vrsave_size
9532 + info_ptr->spe_gp_size
9533 + info_ptr->spe_padding_size
9534 + ehrd_size
9535 + info_ptr->cr_size
9536 + info_ptr->lr_size
9537 + info_ptr->vrsave_size
9538 + info_ptr->toc_size,
9539 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
9540 ? 16 : 8);
9541
9542 total_raw_size = (info_ptr->vars_size
9543 + info_ptr->parm_size
9544 + info_ptr->save_size
9545 + info_ptr->varargs_size
9546 + info_ptr->fixed_size);
9547
9548 info_ptr->total_size =
9549 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
9550
9551 /* Determine if we need to allocate any stack frame:
9552
9553 For AIX we need to push the stack if a frame pointer is needed
9554 (because the stack might be dynamically adjusted), if we are
9555 debugging, if we make calls, or if the sum of fp_save, gp_save,
9556 and local variables are more than the space needed to save all
9557 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
9558 + 18*8 = 288 (GPR13 reserved).
9559
9560 For V.4 we don't have the stack cushion that AIX uses, but assume
9561 that the debugger can handle stackless frames. */
9562
9563 if (info_ptr->calls_p)
9564 info_ptr->push_p = 1;
9565
9566 else if (DEFAULT_ABI == ABI_V4)
9567 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
9568
9569 else if (frame_pointer_needed)
9570 info_ptr->push_p = 1;
9571
9572 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
9573 info_ptr->push_p = 1;
9574
9575 else
9576 info_ptr->push_p
9577 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
9578
9579 /* Zero offsets if we're not saving those registers. */
9580 if (info_ptr->fp_size == 0)
9581 info_ptr->fp_save_offset = 0;
9582
9583 if (info_ptr->gp_size == 0)
9584 info_ptr->gp_save_offset = 0;
9585
9586 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
9587 info_ptr->altivec_save_offset = 0;
9588
9589 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
9590 info_ptr->vrsave_save_offset = 0;
9591
9592 if (! TARGET_SPE_ABI
9593 || info_ptr->spe_64bit_regs_used == 0
9594 || info_ptr->spe_gp_size == 0)
9595 info_ptr->spe_gp_save_offset = 0;
9596
9597 if (! info_ptr->lr_save_p)
9598 info_ptr->lr_save_offset = 0;
9599
9600 if (! info_ptr->cr_save_p)
9601 info_ptr->cr_save_offset = 0;
9602
9603 if (! info_ptr->toc_save_p)
9604 info_ptr->toc_save_offset = 0;
9605
9606 return info_ptr;
9607 }
9608
9609 /* Return true if the current function uses any GPRs in 64-bit SIMD
9610 mode. */
9611
9612 static bool
9613 spe_func_has_64bit_regs_p ()
9614 {
9615 rtx insns, insn;
9616
9617 /* Functions that save and restore all the call-saved registers will
9618 need to save/restore the registers in 64-bits. */
9619 if (current_function_calls_eh_return
9620 || current_function_calls_setjmp
9621 || current_function_has_nonlocal_goto)
9622 return true;
9623
9624 insns = get_insns ();
9625
9626 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
9627 {
9628 if (INSN_P (insn))
9629 {
9630 rtx i;
9631
9632 i = PATTERN (insn);
9633 if (GET_CODE (i) == SET
9634 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
9635 return true;
9636 }
9637 }
9638
9639 return false;
9640 }
9641
/* Debugging aid: dump the fields of INFO, the rs6000_stack_t frame
   layout record, to stderr.  If INFO is null, compute the record for
   the current function first via rs6000_stack_info.  Fields that are
   still at their "nothing to save" defaults are omitted from the
   dump.  */

void
debug_stack_info (info)
     rs6000_stack_t *info;
{
  const char *abi_string;

  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  /* Translate the ABI enumerator into a printable name.  */
  switch (info->abi)
    {
    default: abi_string = "Unknown"; break;
    case ABI_NONE: abi_string = "NONE"; break;
    case ABI_AIX:
    case ABI_AIX_NODESC: abi_string = "AIX"; break;
    case ABI_DARWIN: abi_string = "Darwin"; break;
    case ABI_V4: abi_string = "V.4"; break;
    }

  fprintf (stderr, "\tABI = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  if (TARGET_SPE_ABI)
    fprintf (stderr, "\tSPE ABI extensions enabled.\n");

  /* 32 and 64 are the "no GPR/FPR saved" sentinels (one past the last
     register number of each class).  */
  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);

  if (info->toc_save_p)
    fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);

  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->spe_gp_save_offset)
    fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
	     info->spe_gp_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);

  if (info->toc_save_offset)
    fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  if (info->total_size)
    fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);

  if (info->varargs_size)
    fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);

  if (info->spe_gp_size)
    fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->spe_padding_size)
    fprintf (stderr, "\tspe_padding_size = %5d\n",
	     info->spe_padding_size);

  if (info->lr_size)
    fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);

  if (info->toc_size)
    fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size = %5d\n", info->save_size);

  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
9787
9788 rtx
9789 rs6000_return_addr (count, frame)
9790 int count;
9791 rtx frame;
9792 {
9793 /* Currently we don't optimize very well between prolog and body
9794 code and for PIC code the code can be actually quite bad, so
9795 don't try to be too clever here. */
9796 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
9797 {
9798 cfun->machine->ra_needs_full_frame = 1;
9799
9800 return
9801 gen_rtx_MEM
9802 (Pmode,
9803 memory_address
9804 (Pmode,
9805 plus_constant (copy_to_reg
9806 (gen_rtx_MEM (Pmode,
9807 memory_address (Pmode, frame))),
9808 RETURN_ADDRESS_OFFSET)));
9809 }
9810
9811 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
9812 }
9813
/* Say whether a function is a candidate for sibcall handling or not.
   We do not allow indirect calls to be optimized into sibling calls.
   Also, we can't do it if there are any vector parameters; there's
   nowhere to put the VRsave code so it works; note that functions with
   vector parameters are required to have a prototype, so the argument
   type info must be available here.  (The tail recursion case can work
   with vector parameters, but there's no way to distinguish here.) */
static bool
rs6000_function_ok_for_sibcall (decl, exp)
     tree decl;
     tree exp ATTRIBUTE_UNUSED;
{
  tree type;
  /* DECL is null for indirect calls; those are always rejected.  */
  if (decl)
    {
      /* Reject any callee taking a vector argument (see the comment
	 above about VRsave).  */
      if (TARGET_ALTIVEC_VRSAVE)
        {
	  for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
	       type; type = TREE_CHAIN (type))
	    {
	      if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
		return false;
	    }
        }
      /* Only local-binding callees are safe (on Darwin, any callee).
	 A "longcall" attribute blocks the sibcall unless overridden
	 by "shortcall".  */
      if (DEFAULT_ABI == ABI_DARWIN
	  || (*targetm.binds_local_p) (decl))
	{
	  tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));

	  if (!lookup_attribute ("longcall", attr_list)
	      || lookup_attribute ("shortcall", attr_list))
	    return true;
	}
    }
  return false;
}
9850
/* Return 1 if the link register is ever stored or clobbered in the
   current function's insn stream -- by a non-sibling call, an auto-inc
   reference, or an explicit set outside the prologue/epilogue --
   and 0 otherwise.  Used to decide whether LR needs a stack slot.  */

static int
rs6000_ra_ever_killed ()
{
  rtx top;
  rtx reg;
  rtx insn;

  /* Irritatingly, there are two kinds of thunks -- those created with
     TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
     through the regular part of the compiler.  This is a very hacky
     way to tell them apart.  */
  if (current_function_is_thunk && !no_new_pseudos)
    return 0;

  /* regs_ever_live has LR marked as used if any sibcalls are present,
     but this should not force saving and restoring in the
     pro/epilogue.  Likewise, reg_set_between_p thinks a sibcall
     clobbers LR, so that is inappropriate. */

  /* Also, the prologue can generate a store into LR that
     doesn't really count, like this:

        move LR->R0
        bcl to set PIC register
        move LR->R31
        move R0->LR

     When we're called from the epilogue, we need to avoid counting
     this as a store.  */

  /* Scan the full insn stream, including any pushed sequences.  */
  push_topmost_sequence ();
  top = get_insns ();
  pop_topmost_sequence ();
  reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);

  for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  if (FIND_REG_INC_NOTE (insn, reg))
	    return 1;
	  else if (GET_CODE (insn) == CALL_INSN
		   && !SIBLING_CALL_P (insn))
	    return 1;
	  else if (set_of (reg, insn) != NULL_RTX
		   && !prologue_epilogue_contains (insn))
	    return 1;
	}
    }
  return 0;
}
9902 \f
9903 /* Add a REG_MAYBE_DEAD note to the insn. */
9904 static void
9905 rs6000_maybe_dead (insn)
9906 rtx insn;
9907 {
9908 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
9909 const0_rtx,
9910 REG_NOTES (insn));
9911 }
9912
/* Emit instructions needed to load the TOC register.
   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
   a constant pool; or for SVR4 -fpic.  FROMPROLOG is nonzero when we
   are called while emitting the prologue; in that case hard registers
   (LR, r0) are used as scratch and every generated insn gets a
   REG_MAYBE_DEAD note so flow may delete the sequence if the TOC is
   never referenced.  */

void
rs6000_emit_load_toc_table (fromprolog)
     int fromprolog;
{
  rtx dest, insn;
  dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);

  /* SVR4 small-model PIC (-fpic): one pattern computes the value,
     then it is copied into the TOC register.  */
  if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
    {
      rtx temp = (fromprolog
		  ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		  : gen_reg_rtx (Pmode));
      insn = emit_insn (gen_load_toc_v4_pic_si (temp));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_move_insn (dest, temp);
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  /* SVR4 large-model PIC (-fPIC): compute the TOC pointer from a
     pair of internal labels (prologue) or from the TOC base symbol
     (anywhere else, e.g. when reloading the TOC after a call).  */
  else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
    {
      char buf[30];
      rtx tempLR = (fromprolog
		    ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		    : gen_reg_rtx (Pmode));
      rtx temp0 = (fromprolog
		   ? gen_rtx_REG (Pmode, 0)
		   : gen_reg_rtx (Pmode));
      rtx symF;

      /* possibly create the toc section */
      if (! toc_initialized)
	{
	  toc_section ();
	  function_section (current_function_decl);
	}

      if (fromprolog)
	{
	  rtx symL;

	  /* Label "LCF<n>" marks the load point, "LCL<n>" the TOC
	     anchor for this function.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	  symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
							       symF)));
	  rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
							       symL,
							       symF)));
	}
      else
	{
	  rtx tocsym;
	  static int reload_toc_labelno = 0;

	  tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);

	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
	  symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
	  emit_move_insn (dest, tempLR);
	  emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
	}
      /* Both sub-cases leave an addend in temp0; add it in.  */
      insn = emit_insn (gen_addsi3 (dest, temp0, dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
    {
      /* This is for AIX code running in non-PIC ELF32.  */
      char buf[30];
      rtx realsym;
      ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
      realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

      /* Materialize the TOC address as high/low halves.  */
      insn = emit_insn (gen_elf_high (dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
      insn = emit_insn (gen_elf_low (dest, dest, realsym));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else if (DEFAULT_ABI == ABI_AIX)
    {
      /* AIX: use the dedicated load_toc_aix pattern for the current
	 word size.  */
      if (TARGET_32BIT)
	insn = emit_insn (gen_load_toc_aix_si (dest));
      else
	insn = emit_insn (gen_load_toc_aix_di (dest));
      if (fromprolog)
	rs6000_maybe_dead (insn);
    }
  else
    abort ();
}
10016
/* Return the alias set used for TOC references, creating it lazily
   the first time it is requested.  */

int
get_TOC_alias_set ()
{
  static int toc_alias_set = -1;

  if (toc_alias_set == -1)
    toc_alias_set = new_alias_set ();

  return toc_alias_set;
}
10025
10026 /* This retuns nonzero if the current function uses the TOC. This is
10027 determined by the presence of (unspec ... 7), which is generated by
10028 the various load_toc_* patterns. */
10029
10030 int
10031 uses_TOC ()
10032 {
10033 rtx insn;
10034
10035 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10036 if (INSN_P (insn))
10037 {
10038 rtx pat = PATTERN (insn);
10039 int i;
10040
10041 if (GET_CODE (pat) == PARALLEL)
10042 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
10043 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
10044 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
10045 return 1;
10046 }
10047 return 0;
10048 }
10049
10050 rtx
10051 create_TOC_reference (symbol)
10052 rtx symbol;
10053 {
10054 return gen_rtx_PLUS (Pmode,
10055 gen_rtx_REG (Pmode, TOC_REGISTER),
10056 gen_rtx_CONST (Pmode,
10057 gen_rtx_MINUS (Pmode, symbol,
10058 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
10059 }
10060
10061 #if TARGET_AIX
10062 /* __throw will restore its own return address to be the same as the
10063 return address of the function that the throw is being made to.
10064 This is unfortunate, because we want to check the original
10065 return address to see if we need to restore the TOC.
10066 So we have to squirrel it away here.
10067 This is used only in compiling __throw and __rethrow.
10068
10069 Most of this code should be removed by CSE. */
10070 static rtx insn_after_throw;
10071
/* This does the saving: capture, into insn_after_throw, the
   instruction word found at the return address saved in our caller's
   frame, so rs6000_emit_eh_toc_restore can later decide whether a TOC
   restore is needed.  */
void
rs6000_aix_emit_builtin_unwind_init ()
{
  rtx mem;
  rtx stack_top = gen_reg_rtx (Pmode);
  rtx opcode_addr = gen_reg_rtx (Pmode);

  insn_after_throw = gen_reg_rtx (SImode);

  /* Follow the back chain to the caller's frame.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  emit_move_insn (stack_top, mem);

  /* The slot two pointers into the frame holds the return address
     (presumably the AIX LR save slot -- see the comment above);
     fetch the 32-bit opcode stored at that address.  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, stack_top,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
}
10091
10092 /* Emit insns to _restore_ the TOC register, at runtime (specifically
10093 in _eh.o). Only used on AIX.
10094
10095 The idea is that on AIX, function calls look like this:
10096 bl somefunction-trampoline
10097 lwz r2,20(sp)
10098
10099 and later,
10100 somefunction-trampoline:
10101 stw r2,20(sp)
10102 ... load function address in the count register ...
10103 bctr
10104 or like this, if the linker determines that this is not a cross-module call
10105 and so the TOC need not be restored:
10106 bl somefunction
10107 nop
10108 or like this, if the compiler could determine that this is not a
10109 cross-module call:
10110 bl somefunction
10111 now, the tricky bit here is that register 2 is saved and restored
10112 by the _linker_, so we can't readily generate debugging information
10113 for it. So we need to go back up the call chain looking at the
10114 insns at return addresses to see which calls saved the TOC register
10115 and so see where it gets restored from.
10116
10117 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
10118 just before the actual epilogue.
10119
10120 On the bright side, this incurs no space or time overhead unless an
10121 exception is thrown, except for the extra code in libgcc.a.
10122
10123 The parameter STACKSIZE is a register containing (at runtime)
10124 the amount to be popped off the stack in addition to the stack frame
10125 of this routine (which will be __throw or __rethrow, and so is
10126 guaranteed to have a stack frame). */
10127
void
rs6000_emit_eh_toc_restore (stacksize)
     rtx stacksize;
{
  rtx top_of_stack;
  rtx bottom_of_stack = gen_reg_rtx (Pmode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx mem;
  rtx loop_start = gen_label_rtx ();
  rtx no_toc_restore_needed = gen_label_rtx ();
  rtx loop_exit = gen_label_rtx ();

  /* Start the walk at our own frame's back chain.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  /* The walk stops once we have popped STACKSIZE bytes of frames.  */
  top_of_stack = expand_binop (Pmode, add_optab,
			       bottom_of_stack, stacksize,
			       NULL_RTX, 1, OPTAB_WIDEN);

  /* The opcode of a TOC-restoring load after a call: presumably
     "lwz r2,20(r1)" (32-bit) or "ld r2,40(r1)" (64-bit) -- compare
     with the sequences described in the comment above.  */
  emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
					   : 0xE8410028, SImode));

  /* rs6000_aix_emit_builtin_unwind_init must have run first.  */
  if (insn_after_throw == NULL_RTX)
    abort ();
  emit_move_insn (opcode, insn_after_throw);

  emit_note (NULL, NOTE_INSN_LOOP_BEG);
  emit_label (loop_start);

  /* If the opcode at this frame's return address is the TOC-restore
     load, the caller saved its TOC; reload r2 from that frame's TOC
     slot.  */
  do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_restore_needed);

  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (gen_rtx_REG (Pmode, 2), mem);

  emit_label (no_toc_restore_needed);
  do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
			   Pmode, NULL_RTX, NULL_RTX,
			   loop_exit);

  /* Advance to the next older frame and fetch the opcode at its
     return address, then iterate.  */
  mem = gen_rtx_MEM (Pmode, bottom_of_stack);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_jump (loop_start);
  emit_note (NULL, NOTE_INSN_LOOP_END);
  emit_label (loop_exit);
}
10189 #endif /* TARGET_AIX */
10190 \f
10191 /* This ties together stack memory (MEM with an alias set of
10192 rs6000_sr_alias_set) and the change to the stack pointer. */
10193
10194 static void
10195 rs6000_emit_stack_tie ()
10196 {
10197 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
10198
10199 set_mem_alias_set (mem, rs6000_sr_alias_set);
10200 emit_insn (gen_stack_tie (mem));
10201 }
10202
/* Emit the correct code for allocating stack space, as insns.
   SIZE is the number of bytes to allocate (decrementing the stack
   pointer).  If COPY_R12, make sure a copy of the old frame is left
   in r12.  The generated code may use hard register 0 as a temporary.
   The allocation insn is marked frame-related with an explicit
   REG_FRAME_RELATED_EXPR note.  */

static void
rs6000_emit_allocate_stack (size, copy_r12)
     HOST_WIDE_INT size;
     int copy_r12;
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  /* With -fstack-limit-register / -fstack-limit-symbol, trap before
     the stack pointer would cross the limit.  */
  if (current_function_limit_stack)
    {
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  /* tmp = limit + size; trap if new sp < tmp.  */
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  /* Symbolic limit: materialize limit+size via high/low
	     halves, then trap as above.  */
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Without update forms, r12 is also needed below to store the back
     chain, so the copy is forced in that case.  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      /* Decrements too big for a 16-bit displacement go through r0.  */
      if (size > 32767)
	{
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (0, NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      /* Store-with-update both decrements sp and stores the back
	 chain in one insn.  */
      insn = emit_insn (TARGET_32BIT
			? gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg)
			: gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* Otherwise decrement sp, then store the saved old sp (r12) as
	 the back chain.  */
      insn = emit_insn (TARGET_32BIT
			? gen_addsi3 (stack_reg, stack_reg, todec)
			: gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Describe the sp adjustment for the unwinder.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
10290
/* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
   with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
   is not NULL.  It would be nice if dwarf2out_frame_debug_expr could
   deduce these equivalences by itself so it wasn't necessary to hold
   its hand so much.  The resulting expression becomes INSN's
   REG_FRAME_RELATED_EXPR note, and INSN is marked frame-related.  */

static void
rs6000_frame_related (insn, reg, val, reg2, rreg)
     rtx insn;
     rtx reg;
     HOST_WIDE_INT val;
     rtx reg2;
     rtx rreg;
{
  rtx real, temp;

  /* copy_rtx will not make unique copies of registers, so we need to
     ensure we don't have unwanted sharing here.  */
  if (reg == reg2)
    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));

  if (reg == rreg)
    reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));

  real = copy_rtx (PATTERN (insn));

  if (reg2 != NULL_RTX)
    real = replace_rtx (real, reg2, rreg);

  /* Express REG as a stack-pointer-relative address.  */
  real = replace_rtx (real, reg,
		      gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
							STACK_POINTER_REGNUM),
				    GEN_INT (val)));

  /* We expect that 'real' is either a SET or a PARALLEL containing
     SETs (and possibly other stuff).  In a PARALLEL, all the SETs
     are important so they all have to be marked RTX_FRAME_RELATED_P.  */

  if (GET_CODE (real) == SET)
    {
      rtx set = real;

      /* Fold the constant arithmetic introduced by the replacement
	 above so the unwinder sees simple addresses.  */
      temp = simplify_rtx (SET_SRC (set));
      if (temp)
	SET_SRC (set) = temp;
      temp = simplify_rtx (SET_DEST (set));
      if (temp)
	SET_DEST (set) = temp;
      if (GET_CODE (SET_DEST (set)) == MEM)
	{
	  temp = simplify_rtx (XEXP (SET_DEST (set), 0));
	  if (temp)
	    XEXP (SET_DEST (set), 0) = temp;
	}
    }
  else if (GET_CODE (real) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (real, 0); i++)
	if (GET_CODE (XVECEXP (real, 0, i)) == SET)
	  {
	    rtx set = XVECEXP (real, 0, i);

	    /* Same simplification as the single-SET case.  */
	    temp = simplify_rtx (SET_SRC (set));
	    if (temp)
	      SET_SRC (set) = temp;
	    temp = simplify_rtx (SET_DEST (set));
	    if (temp)
	      SET_DEST (set) = temp;
	    if (GET_CODE (SET_DEST (set)) == MEM)
	      {
		temp = simplify_rtx (XEXP (SET_DEST (set), 0));
		if (temp)
		  XEXP (SET_DEST (set), 0) = temp;
	      }
	    RTX_FRAME_RELATED_P (set) = 1;
	  }
    }
  else
    abort ();

  /* SPE 64-bit saves need an extra synthetic-register SET; see
     spe_synthesize_frame_save.  */
  if (TARGET_SPE)
    real = spe_synthesize_frame_save (real);

  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
					real,
					REG_NOTES (insn));
}
10380
/* Given an SPE frame note REAL, return a PARALLEL of SETs with the
   original note, plus a synthetic register save.  Non-V2SImode SETs
   are returned unchanged.  */

static rtx
spe_synthesize_frame_save (real)
     rtx real;
{
  rtx synth, offset, reg, real2;

  /* Only 64-bit (V2SImode) register saves need the treatment.  */
  if (GET_CODE (real) != SET
      || GET_MODE (SET_SRC (real)) != V2SImode)
    return real;

  /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
     frame related note.  The parallel contains a set of the register
     being saved, and another set to a synthetic register (n+1200).
     This is so we can differentiate between 64-bit and 32-bit saves.
     Words cannot describe this nastiness.  */

  if (GET_CODE (SET_DEST (real)) != MEM
      || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
      || GET_CODE (SET_SRC (real)) != REG)
    abort ();

  /* Transform:
       (set (mem (plus (reg x) (const y)))
            (reg z))
     into:
       (set (mem (plus (reg x) (const y+4)))
            (reg z+1200))
  */

  /* REAL2 is the SImode half attributed to the real register; SYNTH
     is the other SImode half attributed to register n+1200.  */
  real2 = copy_rtx (real);
  PUT_MODE (SET_DEST (real2), SImode);
  reg = SET_SRC (real2);
  real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
  synth = copy_rtx (real2);

  /* On big-endian the low word of the pair lives at offset +4.  */
  if (BYTES_BIG_ENDIAN)
    {
      offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
      real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
    }

  reg = SET_SRC (synth);

  synth = replace_rtx (synth, reg,
		       gen_rtx_REG (SImode, REGNO (reg) + 1200));

  offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
  synth = replace_rtx (synth, offset,
		       GEN_INT (INTVAL (offset)
				+ (BYTES_BIG_ENDIAN ? 0 : 4)));

  RTX_FRAME_RELATED_P (synth) = 1;
  RTX_FRAME_RELATED_P (real2) = 1;
  if (BYTES_BIG_ENDIAN)
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
  else
    real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));

  return real;
}
10444
10445 /* Returns an insn that has a vrsave set operation with the
10446 appropriate CLOBBERs. */
10447
10448 static rtx
10449 generate_set_vrsave (reg, info, epiloguep)
10450 rtx reg;
10451 rs6000_stack_t *info;
10452 int epiloguep;
10453 {
10454 int nclobs, i;
10455 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
10456 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10457
10458 clobs[0]
10459 = gen_rtx_SET (VOIDmode,
10460 vrsave,
10461 gen_rtx_UNSPEC_VOLATILE (SImode,
10462 gen_rtvec (2, reg, vrsave),
10463 30));
10464
10465 nclobs = 1;
10466
10467 /* We need to clobber the registers in the mask so the scheduler
10468 does not move sets to VRSAVE before sets of AltiVec registers.
10469
10470 However, if the function receives nonlocal gotos, reload will set
10471 all call saved registers live. We will end up with:
10472
10473 (set (reg 999) (mem))
10474 (parallel [ (set (reg vrsave) (unspec blah))
10475 (clobber (reg 999))])
10476
10477 The clobber will cause the store into reg 999 to be dead, and
10478 flow will attempt to delete an epilogue insn. In this case, we
10479 need an unspec use/set of the register. */
10480
10481 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10482 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
10483 {
10484 if (!epiloguep || call_used_regs [i])
10485 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
10486 gen_rtx_REG (V4SImode, i));
10487 else
10488 {
10489 rtx reg = gen_rtx_REG (V4SImode, i);
10490
10491 clobs[nclobs++]
10492 = gen_rtx_SET (VOIDmode,
10493 reg,
10494 gen_rtx_UNSPEC (V4SImode,
10495 gen_rtvec (1, reg), 27));
10496 }
10497 }
10498
10499 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
10500
10501 for (i = 0; i < nclobs; ++i)
10502 XVECEXP (insn, 0, i) = clobs[i];
10503
10504 return insn;
10505 }
10506
/* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
   Save REGNO into [FRAME_REG + OFFSET] in mode MODE.  FRAME_PTR and
   TOTAL_SIZE are forwarded to rs6000_frame_related so the unwind note
   can be expressed relative to the stack pointer.  */

static void
emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
     rtx frame_reg;
     rtx frame_ptr;
     enum machine_mode mode;
     unsigned int regno;
     int offset;
     int total_size;
{
  rtx reg, offset_rtx, insn, mem, addr, int_rtx;
  rtx replacea, replaceb;

  int_rtx = GEN_INT (offset);

  /* Some cases that need register indexed addressing.  */
  if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
      || (TARGET_SPE_ABI
	  && SPE_VECTOR_MODE (mode)
	  && !SPE_CONST_OFFSET_OK (offset)))
    {
      /* Whomever calls us must make sure r11 is available in the
         flow path of instructions in the prologue.  */
      offset_rtx = gen_rtx_REG (Pmode, 11);
      emit_move_insn (offset_rtx, int_rtx);

      /* In the unwind note, replace the register offset with the
	 constant it holds.  */
      replacea = offset_rtx;
      replaceb = int_rtx;
    }
  else
    {
      offset_rtx = int_rtx;
      replacea = NULL_RTX;
      replaceb = NULL_RTX;
    }

  reg = gen_rtx_REG (mode, regno);
  addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
  mem = gen_rtx_MEM (mode, addr);
  set_mem_alias_set (mem, rs6000_sr_alias_set);

  insn = emit_move_insn (mem, reg);

  rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
}
10554
10555 /* Emit an offset memory reference suitable for a frame store, while
10556 converting to a valid addressing mode. */
10557
10558 static rtx
10559 gen_frame_mem_offset (mode, reg, offset)
10560 enum machine_mode mode;
10561 rtx reg;
10562 int offset;
10563 {
10564 rtx int_rtx, offset_rtx;
10565
10566 int_rtx = GEN_INT (offset);
10567
10568 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
10569 {
10570 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
10571 emit_move_insn (offset_rtx, int_rtx);
10572 }
10573 else
10574 offset_rtx = int_rtx;
10575
10576 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
10577 }
10578
/* Emit function prologue as insns.

   Allocates the stack frame described by rs6000_stack_info () and
   saves whatever callee-saved state this function needs preserved:
   AltiVec registers, VRSAVE, FPRs, GPRs, the exception-return data
   registers, LR and CR.  Afterwards it establishes the frame pointer
   and the TOC/PIC register where required.  Saves are tagged with
   unwind information via rs6000_frame_related.  Note the ordering is
   ABI-dependent: V.4 adjusts the stack pointer first, the other ABIs
   adjust it after the saves.  */

void
rs6000_emit_prologue ()
{
  rs6000_stack_t *info = rs6000_stack_info ();
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  /* r12 doubles as the save-area base register when the new stack
     pointer cannot address the frame directly (large V.4 frames).  */
  rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
  rtx frame_reg_rtx = sp_reg_rtx;
  rtx cr_save_rtx = NULL;
  rtx insn;
  int saving_FPRs_inline;
  int using_store_multiple;
  /* Displacement between frame_reg_rtx and the incoming stack pointer;
     becomes non-zero only for small V.4 frames (see below).  */
  HOST_WIDE_INT sp_offset = 0;

  /* When 64-bit SPE registers are live, GPRs must be spilled as full
     64-bit quantities, done here as V2SImode stores.  */
  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  /* Store-multiple only covers 32-bit GPRs, and is not worthwhile for
     a single register (first_gp_reg_save == 31).  */
  using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			  && (!TARGET_SPE_ABI
			      || info->spe_64bit_regs_used == 0)
			  && info->first_gp_reg_save < 31);
  saving_FPRs_inline = (info->first_fp_reg_save == 64
			|| FP_SAVE_INLINE (info->first_fp_reg_save));

  /* For V.4, update stack before we do any saving and set back pointer.  */
  if (info->push_p && DEFAULT_ABI == ABI_V4)
    {
      /* Small frames can be addressed off the new SP with a 16-bit
	 displacement; large frames keep the old SP in r12 instead.  */
      if (info->total_size < 32767)
	sp_offset = info->total_size;
      else
	frame_reg_rtx = frame_ptr_rtx;
      rs6000_emit_allocate_stack (info->total_size,
				  (frame_reg_rtx != sp_reg_rtx
				   && (info->cr_save_p
				       || info->lr_save_p
				       || info->first_fp_reg_save < 64
				       || info->first_gp_reg_save < 32
				       )));
      /* Keep the scheduler from moving the saves above the stack
	 adjustment.  */
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();
    }

  /* Save AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      /* There should be a non inline version of this, for when we
	 are saving lots of vector registers.  */
      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx areg, savereg, mem;
	    int offset;

	    offset = info->altivec_save_offset + sp_offset
	      + 16 * (i - info->first_altivec_reg_save);

	    savereg = gen_rtx_REG (V4SImode, i);

	    /* r0 holds the offset; it is call-clobbered and free here.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn (areg, GEN_INT (offset));

	    /* AltiVec addressing mode is [reg+reg].  */
	    mem = gen_rtx_MEM (V4SImode,
			       gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));

	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, savereg);

	    rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				  areg, GEN_INT (offset));
	  }
    }

  /* VRSAVE is a bit vector representing which AltiVec registers
     are used.  The OS uses this to determine which vector
     registers to save on a context switch.  We need to save
     VRSAVE on the stack frame, add whatever AltiVec registers we
     used in this function, and do the corresponding magic in the
     epilogue.  */

  if (TARGET_ALTIVEC && info->vrsave_mask != 0)
    {
      rtx reg, mem, vrsave;
      int offset;

      /* Get VRSAVE onto a GPR.  */
      reg = gen_rtx_REG (SImode, 12);
      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
      if (TARGET_MACHO)
	emit_insn (gen_get_vrsave_internal (reg));
      else
	emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));

      /* Save VRSAVE.  */
      offset = info->vrsave_save_offset + sp_offset;
      mem
	= gen_rtx_MEM (SImode,
		       gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      insn = emit_move_insn (mem, reg);

      /* Include the registers in the mask.  */
      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));

      insn = emit_insn (generate_set_vrsave (reg, info, 0));
    }

  /* If we use the link register, get it into r0.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, 0),
		    gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

  /* If we need to save CR, put it into r12.  (When r12 is in use as the
     frame base, CR is moved to r0 later, just before its store.)  */
  if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
    {
      cr_save_rtx = gen_rtx_REG (SImode, 12);
      emit_insn (gen_movesi_from_cr (cr_save_rtx));
    }

  /* Do any required saving of fpr's.  If only one or two to save, do
     it ourselves.  Otherwise, call function.  */
  if (saving_FPRs_inline)
    {
      int i;
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	if ((regs_ever_live[info->first_fp_reg_save+i]
	     && ! call_used_regs[info->first_fp_reg_save+i]))
	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
			   info->first_fp_reg_save + i,
			   info->fp_save_offset + sp_offset + 8 * i,
			   info->total_size);
    }
  else if (info->first_fp_reg_save != 64)
    {
      /* Emit a single PARALLEL: a call to the out-of-line _savefN
	 routine plus the frame stores it performs, so the unwinder
	 sees all the FPR saves.  */
      int i;
      char rname[30];
      const char *alloc_rname;
      rtvec p;
      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);

      /* The helper routine clobbers LR.  */
      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
					  gen_rtx_REG (Pmode,
						       LINK_REGISTER_REGNUM));
      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
	       info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
      alloc_rname = ggc_strdup (rname);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_SYMBOL_REF (Pmode,
							  alloc_rname));
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset + 8*i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }

  /* Save GPRs.  This is done as a PARALLEL if we are using
     the store-multiple instructions.  */
  if (using_store_multiple)
    {
      rtvec p;
      int i;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->gp_save_offset
					+ sp_offset
					+ reg_size * i));
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }
  else
    {
      int i;
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	/* Besides live call-saved registers, the PIC register must be
	   saved under the PIC-using V.4 and Darwin ABIs even though it
	   is nominally call-clobbered.  */
	if ((regs_ever_live[info->first_gp_reg_save+i]
	     && ! call_used_regs[info->first_gp_reg_save+i])
	    || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);

	    if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
	      {
		/* 64-bit SPE saves go to a separate save area and may
		   need the offset materialized in FIXED_SCRATCH.  */
		int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
		rtx b;

		if (!SPE_CONST_OFFSET_OK (offset))
		  {
		    b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		    emit_move_insn (b, GEN_INT (offset));
		  }
		else
		  b = GEN_INT (offset);

		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
		mem = gen_rtx_MEM (V2SImode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);
		insn = emit_move_insn (mem, reg);

		if (GET_CODE (b) == CONST_INT)
		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
					NULL_RTX, NULL_RTX);
		else
		  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
					b, GEN_INT (offset));
	      }
	    else
	      {
		addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				     GEN_INT (info->gp_save_offset
					      + sp_offset
					      + reg_size * i));
		mem = gen_rtx_MEM (reg_mode, addr);
		set_mem_alias_set (mem, rs6000_sr_alias_set);

		insn = emit_move_insn (mem, reg);
		rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				      NULL_RTX, NULL_RTX);
	      }
	  }
    }

  /* ??? There's no need to emit actual instructions here, but it's the
     easiest way to get the frame unwind information emitted.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
			   info->ehrd_offset + sp_offset
			   + reg_size * (int) i,
			   info->total_size);
	}
    }

  /* Save lr if we used it.  (It was copied to r0 above.)  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      rtx reg = gen_rtx_REG (Pmode, 0);
      rtx mem = gen_rtx_MEM (Pmode, addr);
      /* This should not be of rs6000_sr_alias_set, because of
	 __builtin_return_address.  */

      insn = emit_move_insn (mem, reg);
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
    }

  /* Save CR if we use any that must be preserved.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* If r12 was used to hold the original sp, copy cr into r0 now
	 that it's free.  */
      if (REGNO (frame_reg_rtx) == 12)
	{
	  cr_save_rtx = gen_rtx_REG (SImode, 0);
	  emit_insn (gen_movesi_from_cr (cr_save_rtx));
	}
      insn = emit_move_insn (mem, cr_save_rtx);

      /* Now, there's no way that dwarf2out_frame_debug_expr is going
	 to understand '(unspec:SI [(reg:CC 68) ...] 19)'.  But that's
	 OK.  All we have to do is specify that _one_ condition code
	 register is saved in this stack slot.  The thrower's epilogue
	 will then restore all the call-saved registers.
	 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
    }

  /* Update stack and set back pointer unless this is V.4,
     for which it was done previously.  */
  if (info->push_p && DEFAULT_ABI != ABI_V4)
    rs6000_emit_allocate_stack (info->total_size, FALSE);

  /* Set frame pointer, if needed.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
			     sp_reg_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
      || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
	  && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
    {
      /* If emit_load_toc_table will use the link register, we need to save
	 it.  We use R11 for this purpose because emit_load_toc_table
	 can use register 0.  This allows us to use a plain 'blr' to return
	 from the procedure more often.  */
      int save_LR_around_toc_setup = (TARGET_ELF
				      && DEFAULT_ABI != ABI_AIX
				      && flag_pic
				      && ! info->lr_save_p
				      && EXIT_BLOCK_PTR->pred != NULL);
      if (save_LR_around_toc_setup)
	emit_move_insn (gen_rtx_REG (Pmode, 11),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

      rs6000_emit_load_toc_table (TRUE);

      if (save_LR_around_toc_setup)
	emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
			gen_rtx_REG (Pmode, 11));
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN
      && flag_pic && current_function_uses_pic_offset_table)
    {
      /* Load the Mach-O picbase into LR, then copy it to the PIC
	 offset table register.  The insns may turn out to be dead if
	 the picbase ends up unused; rs6000_maybe_dead marks them so.  */
      rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
      const char *picbase = machopic_function_base_name ();
      rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);

      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));

      rs6000_maybe_dead (
	emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
    }
#endif
}
10948
10949 /* Write function prologue. */
10950
10951 static void
10952 rs6000_output_function_prologue (file, size)
10953 FILE *file;
10954 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10955 {
10956 rs6000_stack_t *info = rs6000_stack_info ();
10957
10958 if (TARGET_DEBUG_STACK)
10959 debug_stack_info (info);
10960
10961 /* Write .extern for any function we will call to save and restore
10962 fp values. */
10963 if (info->first_fp_reg_save < 64
10964 && !FP_SAVE_INLINE (info->first_fp_reg_save))
10965 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
10966 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
10967 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
10968 RESTORE_FP_SUFFIX);
10969
10970 /* Write .extern for AIX common mode routines, if needed. */
10971 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
10972 {
10973 fputs ("\t.extern __mulh\n", file);
10974 fputs ("\t.extern __mull\n", file);
10975 fputs ("\t.extern __divss\n", file);
10976 fputs ("\t.extern __divus\n", file);
10977 fputs ("\t.extern __quoss\n", file);
10978 fputs ("\t.extern __quous\n", file);
10979 common_mode_defined = 1;
10980 }
10981
10982 if (! HAVE_prologue)
10983 {
10984 start_sequence ();
10985
10986 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
10987 the "toplevel" insn chain. */
10988 emit_note (0, NOTE_INSN_DELETED);
10989 rs6000_emit_prologue ();
10990 emit_note (0, NOTE_INSN_DELETED);
10991
10992 /* Expand INSN_ADDRESSES so final() doesn't crash. */
10993 {
10994 rtx insn;
10995 unsigned addr = 0;
10996 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
10997 {
10998 INSN_ADDRESSES_NEW (insn, addr);
10999 addr += 4;
11000 }
11001 }
11002
11003 if (TARGET_DEBUG_STACK)
11004 debug_rtx_list (get_insns (), 100);
11005 final (get_insns (), file, FALSE, FALSE);
11006 end_sequence ();
11007 }
11008
11009 rs6000_pic_labelno++;
11010 }
11011
/* Emit function epilogue as insns.

   Undoes rs6000_emit_prologue: locates the frame (via the backchain
   when its size is unknown at compile time), restores AltiVec regs,
   VRSAVE, LR, CR, the eh-return data registers, GPRs and FPRs, then
   deallocates the frame and, unless SIBCALL, emits the return (which
   may branch to an out-of-line FPR restore routine).

   At present, dwarf2out_frame_debug_expr doesn't understand
   register restores, so we don't bother setting RTX_FRAME_RELATED_P
   anywhere in the epilogue.  Most of the insns below would in any case
   need special notes to explain where r11 is in relation to the stack.  */

void
rs6000_emit_epilogue (sibcall)
     int sibcall;
{
  rs6000_stack_t *info;
  int restoring_FPRs_inline;
  int using_load_multiple;
  int using_mfcr_multiple;
  int use_backchain_to_restore_sp;
  /* Displacement from frame_reg_rtx to the frame bottom; non-zero only
     for small V.4 frames whose SP is reset after the restores.  */
  int sp_offset = 0;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
  rtx frame_reg_rtx = sp_reg_rtx;
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  int i;

  info = rs6000_stack_info ();

  /* Mirror the prologue: 64-bit SPE GPR state is reloaded as V2SImode.  */
  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
    {
      reg_mode = V2SImode;
      reg_size = 8;
    }

  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			 && (!TARGET_SPE_ABI
			     || info->spe_64bit_regs_used == 0)
			 && info->first_gp_reg_save < 31);
  restoring_FPRs_inline = (sibcall
			   || current_function_calls_eh_return
			   || info->first_fp_reg_save == 64
			   || FP_SAVE_INLINE (info->first_fp_reg_save));
  use_backchain_to_restore_sp = (frame_pointer_needed
				 || current_function_calls_alloca
				 || info->total_size > 32767);
  /* On these processors an mtcrf of multiple fields is no slower than
     one field, so restoring CR with a single multi-field move wins.  */
  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
			 || rs6000_cpu == PROCESSOR_PPC603
			 || rs6000_cpu == PROCESSOR_PPC750
			 || optimize_size);

  /* If we have a frame pointer, a call to alloca,  or a large stack
     frame, restore the old stack pointer using the backchain.  Otherwise,
     we know what size to update it with.  */
  if (use_backchain_to_restore_sp)
    {
      /* Under V.4, don't reset the stack pointer until after we're done
	 loading the saved registers.  */
      if (DEFAULT_ABI == ABI_V4)
	frame_reg_rtx = gen_rtx_REG (Pmode, 11);

      /* The word at 0(sp) is the backchain: the caller's SP.  */
      emit_move_insn (frame_reg_rtx,
		      gen_rtx_MEM (Pmode, sp_reg_rtx));

    }
  else if (info->push_p)
    {
      if (DEFAULT_ABI == ABI_V4)
	sp_offset = info->total_size;
      else
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size)));
	}
    }

  /* Restore AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx addr, areg, mem;

	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn
	      (areg, GEN_INT (info->altivec_save_offset
			      + sp_offset
			      + 16 * (i - info->first_altivec_reg_save)));

	    /* AltiVec addressing mode is [reg+reg].  */
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
	    mem = gen_rtx_MEM (V4SImode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
	  }
    }

  /* Restore VRSAVE if needed.  */
  /* NOTE(review): the prologue saves VRSAVE under TARGET_ALTIVEC,
     but this restore is guarded by TARGET_ALTIVEC_ABI -- confirm the
     asymmetry is intentional for non-ABI AltiVec configurations.  */
  if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
    {
      rtx addr, mem, reg;

      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			   GEN_INT (info->vrsave_save_offset + sp_offset));
      mem = gen_rtx_MEM (SImode, addr);
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      reg = gen_rtx_REG (SImode, 12);
      emit_move_insn (reg, mem);

      emit_insn (generate_set_vrsave (reg, info, 1));
    }

  /* Get the old lr if we saved it.  */
  if (info->lr_save_p)
    {
      rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
				      info->lr_save_offset + sp_offset);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
    }

  /* Get the old cr if we saved it.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
    }

  /* Set LR here to try to overlap restores below.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		    gen_rtx_REG (Pmode, 0));

  /* Load exception handler data registers, if needed.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  rtx mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
				      info->ehrd_offset + sp_offset
				      + reg_size * (int) i);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
	}
    }

  /* Restore GPRs.  This is done as a PARALLEL if we are using
     the load-multiple instructions.  */
  if (using_load_multiple)
    {
      rtvec p;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) =
	    gen_rtx_SET (VOIDmode,
			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
			 mem);
	}
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
  else
    /* Restore the same set the prologue saved: live call-saved GPRs,
       plus the PIC register under PIC-using V.4/Darwin ABIs.  */
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
      if ((regs_ever_live[info->first_gp_reg_save+i]
	   && ! call_used_regs[info->first_gp_reg_save+i])
	  || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
	      && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  /* Restore 64-bit quantities for SPE.  */
	  if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
	    {
	      int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
	      rtx b;

	      if (!SPE_CONST_OFFSET_OK (offset))
		{
		  b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
		  emit_move_insn (b, GEN_INT (offset));
		}
	      else
		b = GEN_INT (offset);

	      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
	      mem = gen_rtx_MEM (V2SImode, addr);
	    }

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode,
				       info->first_gp_reg_save + i), mem);
	}

  /* Restore fpr's if we need to do it without calling a function.  */
  if (restoring_FPRs_inline)
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
      if ((regs_ever_live[info->first_fp_reg_save+i]
	   && ! call_used_regs[info->first_fp_reg_save+i]))
	{
	  rtx addr, mem;
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset
					+ 8 * i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (DFmode,
				       info->first_fp_reg_save + i),
			  mem);
	}

  /* If we saved cr, restore it here.  Just those that were used.  */
  if (info->cr_save_p)
    {
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
      int count = 0;

      if (using_mfcr_multiple)
	{
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      count++;
	  if (count == 0)
	    abort ();
	}

      if (using_mfcr_multiple && count > 1)
	{
	  /* One mtcrf covering all live CR fields: each SET takes the
	     saved word in r12 plus a one-hot field-selector mask.  */
	  rtvec p;
	  int ndx;

	  p = rtvec_alloc (count);

	  ndx = 0;
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      {
		rtvec r = rtvec_alloc (2);
		RTVEC_ELT (r, 0) = r12_rtx;
		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
		RTVEC_ELT (p, ndx) =
		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
			       gen_rtx_UNSPEC (CCmode, r, 20));
		ndx++;
	      }
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
	  if (ndx != count)
	    abort ();
	}
      else
	for (i = 0; i < 8; i++)
	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	    {
	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
							   CR0_REGNO+i),
					      r12_rtx));
	    }
    }

  /* If this is V.4, unwind the stack pointer after all of the loads
     have been done.  We need to emit a block here so that sched
     doesn't decide to move the sp change before the register restores
     (which may not have any obvious dependency on the stack).  This
     doesn't hurt performance, because there is no scheduling that can
     be done after this point.  */
  if (DEFAULT_ABI == ABI_V4)
    {
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();

      if (use_backchain_to_restore_sp)
	{
	  emit_move_insn (sp_reg_rtx, frame_reg_rtx);
	}
      else if (sp_offset != 0)
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset)));
	}
    }

  /* Apply the extra stack adjustment requested by __builtin_eh_return.  */
  if (current_function_calls_eh_return)
    {
      rtx sa = EH_RETURN_STACKADJ_RTX;
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
    }

  if (!sibcall)
    {
      rtvec p;
      if (! restoring_FPRs_inline)
	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
      else
	p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_REG (Pmode,
						   LINK_REGISTER_REGNUM));

      /* If we have to restore more than two FP registers, branch to the
	 restore function.  It will return to our caller.  */
      if (! restoring_FPRs_inline)
	{
	  int i;
	  char rname[30];
	  const char *alloc_rname;

	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
	  alloc_rname = ggc_strdup (rname);
	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
					  gen_rtx_SYMBOL_REF (Pmode,
							      alloc_rname));

	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	    {
	      rtx addr, mem;
	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
				   GEN_INT (info->fp_save_offset + 8*i));
	      mem = gen_rtx_MEM (DFmode, addr);
	      set_mem_alias_set (mem, rs6000_sr_alias_set);

	      RTVEC_ELT (p, i+3) =
		gen_rtx_SET (VOIDmode,
			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
			     mem);
	    }
	}

      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
11384
11385 /* Write function epilogue. */
11386
11387 static void
11388 rs6000_output_function_epilogue (file, size)
11389 FILE *file;
11390 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11391 {
11392 rs6000_stack_t *info = rs6000_stack_info ();
11393
11394 if (! HAVE_epilogue)
11395 {
11396 rtx insn = get_last_insn ();
11397 /* If the last insn was a BARRIER, we don't have to write anything except
11398 the trace table. */
11399 if (GET_CODE (insn) == NOTE)
11400 insn = prev_nonnote_insn (insn);
11401 if (insn == 0 || GET_CODE (insn) != BARRIER)
11402 {
11403 /* This is slightly ugly, but at least we don't have two
11404 copies of the epilogue-emitting code. */
11405 start_sequence ();
11406
11407 /* A NOTE_INSN_DELETED is supposed to be at the start
11408 and end of the "toplevel" insn chain. */
11409 emit_note (0, NOTE_INSN_DELETED);
11410 rs6000_emit_epilogue (FALSE);
11411 emit_note (0, NOTE_INSN_DELETED);
11412
11413 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11414 {
11415 rtx insn;
11416 unsigned addr = 0;
11417 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11418 {
11419 INSN_ADDRESSES_NEW (insn, addr);
11420 addr += 4;
11421 }
11422 }
11423
11424 if (TARGET_DEBUG_STACK)
11425 debug_rtx_list (get_insns (), 100);
11426 final (get_insns (), file, FALSE, FALSE);
11427 end_sequence ();
11428 }
11429 }
11430
11431 /* Output a traceback table here. See /usr/include/sys/debug.h for info
11432 on its format.
11433
11434 We don't output a traceback table if -finhibit-size-directive was
11435 used. The documentation for -finhibit-size-directive reads
11436 ``don't output a @code{.size} assembler directive, or anything
11437 else that would cause trouble if the function is split in the
11438 middle, and the two halves are placed at locations far apart in
11439 memory.'' The traceback table has this property, since it
11440 includes the offset from the start of the function to the
11441 traceback table itself.
11442
11443 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11444 different traceback table. */
11445 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
11446 && rs6000_traceback != traceback_none)
11447 {
11448 const char *fname = NULL;
11449 const char *language_string = lang_hooks.name;
11450 int fixed_parms = 0, float_parms = 0, parm_info = 0;
11451 int i;
11452 int optional_tbtab;
11453
11454 if (rs6000_traceback == traceback_full)
11455 optional_tbtab = 1;
11456 else if (rs6000_traceback == traceback_part)
11457 optional_tbtab = 0;
11458 else
11459 optional_tbtab = !optimize_size && !TARGET_ELF;
11460
11461 if (optional_tbtab)
11462 {
11463 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
11464 while (*fname == '.') /* V.4 encodes . in the name */
11465 fname++;
11466
11467 /* Need label immediately before tbtab, so we can compute
11468 its offset from the function start. */
11469 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11470 ASM_OUTPUT_LABEL (file, fname);
11471 }
11472
11473 /* The .tbtab pseudo-op can only be used for the first eight
11474 expressions, since it can't handle the possibly variable
11475 length fields that follow. However, if you omit the optional
11476 fields, the assembler outputs zeros for all optional fields
11477 anyways, giving each variable length field is minimum length
11478 (as defined in sys/debug.h). Thus we can not use the .tbtab
11479 pseudo-op at all. */
11480
11481 /* An all-zero word flags the start of the tbtab, for debuggers
11482 that have to find it by searching forward from the entry
11483 point or from the current pc. */
11484 fputs ("\t.long 0\n", file);
11485
11486 /* Tbtab format type. Use format type 0. */
11487 fputs ("\t.byte 0,", file);
11488
11489 /* Language type. Unfortunately, there doesn't seem to be any
11490 official way to get this info, so we use language_string. C
11491 is 0. C++ is 9. No number defined for Obj-C, so use the
11492 value for C for now. There is no official value for Java,
11493 although IBM appears to be using 13. There is no official value
11494 for Chill, so we've chosen 44 pseudo-randomly. */
11495 if (! strcmp (language_string, "GNU C")
11496 || ! strcmp (language_string, "GNU Objective-C"))
11497 i = 0;
11498 else if (! strcmp (language_string, "GNU F77"))
11499 i = 1;
11500 else if (! strcmp (language_string, "GNU Ada"))
11501 i = 3;
11502 else if (! strcmp (language_string, "GNU Pascal"))
11503 i = 2;
11504 else if (! strcmp (language_string, "GNU C++"))
11505 i = 9;
11506 else if (! strcmp (language_string, "GNU Java"))
11507 i = 13;
11508 else if (! strcmp (language_string, "GNU CHILL"))
11509 i = 44;
11510 else
11511 abort ();
11512 fprintf (file, "%d,", i);
11513
11514 /* 8 single bit fields: global linkage (not set for C extern linkage,
11515 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
11516 from start of procedure stored in tbtab, internal function, function
11517 has controlled storage, function has no toc, function uses fp,
11518 function logs/aborts fp operations. */
11519 /* Assume that fp operations are used if any fp reg must be saved. */
11520 fprintf (file, "%d,",
11521 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
11522
11523 /* 6 bitfields: function is interrupt handler, name present in
11524 proc table, function calls alloca, on condition directives
11525 (controls stack walks, 3 bits), saves condition reg, saves
11526 link reg. */
11527 /* The `function calls alloca' bit seems to be set whenever reg 31 is
11528 set up as a frame pointer, even when there is no alloca call. */
11529 fprintf (file, "%d,",
11530 ((optional_tbtab << 6)
11531 | ((optional_tbtab & frame_pointer_needed) << 5)
11532 | (info->cr_save_p << 1)
11533 | (info->lr_save_p)));
11534
11535 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
11536 (6 bits). */
11537 fprintf (file, "%d,",
11538 (info->push_p << 7) | (64 - info->first_fp_reg_save));
11539
11540 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
11541 fprintf (file, "%d,", (32 - first_reg_to_save ()));
11542
11543 if (optional_tbtab)
11544 {
11545 /* Compute the parameter info from the function decl argument
11546 list. */
11547 tree decl;
11548 int next_parm_info_bit = 31;
11549
11550 for (decl = DECL_ARGUMENTS (current_function_decl);
11551 decl; decl = TREE_CHAIN (decl))
11552 {
11553 rtx parameter = DECL_INCOMING_RTL (decl);
11554 enum machine_mode mode = GET_MODE (parameter);
11555
11556 if (GET_CODE (parameter) == REG)
11557 {
11558 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
11559 {
11560 int bits;
11561
11562 float_parms++;
11563
11564 if (mode == SFmode)
11565 bits = 0x2;
11566 else if (mode == DFmode || mode == TFmode)
11567 bits = 0x3;
11568 else
11569 abort ();
11570
11571 /* If only one bit will fit, don't or in this entry. */
11572 if (next_parm_info_bit > 0)
11573 parm_info |= (bits << (next_parm_info_bit - 1));
11574 next_parm_info_bit -= 2;
11575 }
11576 else
11577 {
11578 fixed_parms += ((GET_MODE_SIZE (mode)
11579 + (UNITS_PER_WORD - 1))
11580 / UNITS_PER_WORD);
11581 next_parm_info_bit -= 1;
11582 }
11583 }
11584 }
11585 }
11586
11587 /* Number of fixed point parameters. */
11588 /* This is actually the number of words of fixed point parameters; thus
11589 an 8 byte struct counts as 2; and thus the maximum value is 8. */
11590 fprintf (file, "%d,", fixed_parms);
11591
11592 /* 2 bitfields: number of floating point parameters (7 bits), parameters
11593 all on stack. */
11594 /* This is actually the number of fp registers that hold parameters;
11595 and thus the maximum value is 13. */
11596 /* Set parameters on stack bit if parameters are not in their original
11597 registers, regardless of whether they are on the stack? Xlc
11598 seems to set the bit when not optimizing. */
11599 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
11600
11601 if (! optional_tbtab)
11602 return;
11603
11604 /* Optional fields follow. Some are variable length. */
11605
11606 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
11607 11 double float. */
11608 /* There is an entry for each parameter in a register, in the order that
11609 they occur in the parameter list. Any intervening arguments on the
11610 stack are ignored. If the list overflows a long (max possible length
11611 34 bits) then completely leave off all elements that don't fit. */
11612 /* Only emit this long if there was at least one parameter. */
11613 if (fixed_parms || float_parms)
11614 fprintf (file, "\t.long %d\n", parm_info);
11615
11616 /* Offset from start of code to tb table. */
11617 fputs ("\t.long ", file);
11618 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11619 #if TARGET_AIX
11620 RS6000_OUTPUT_BASENAME (file, fname);
11621 #else
11622 assemble_name (file, fname);
11623 #endif
11624 fputs ("-.", file);
11625 #if TARGET_AIX
11626 RS6000_OUTPUT_BASENAME (file, fname);
11627 #else
11628 assemble_name (file, fname);
11629 #endif
11630 putc ('\n', file);
11631
11632 /* Interrupt handler mask. */
11633 /* Omit this long, since we never set the interrupt handler bit
11634 above. */
11635
11636 /* Number of CTL (controlled storage) anchors. */
11637 /* Omit this long, since the has_ctl bit is never set above. */
11638
11639 /* Displacement into stack of each CTL anchor. */
11640 /* Omit this list of longs, because there are no CTL anchors. */
11641
11642 /* Length of function name. */
11643 if (*fname == '*')
11644 ++fname;
11645 fprintf (file, "\t.short %d\n", (int) strlen (fname));
11646
11647 /* Function name. */
11648 assemble_string (fname, strlen (fname));
11649
11650 /* Register for alloca automatic storage; this is always reg 31.
11651 Only emit this if the alloca bit was set above. */
11652 if (frame_pointer_needed)
11653 fputs ("\t.byte 31\n", file);
11654
11655 fputs ("\t.align 2\n", file);
11656 }
11657 }
11658 \f
11659 /* A C compound statement that outputs the assembler code for a thunk
11660 function, used to implement C++ virtual function calls with
11661 multiple inheritance. The thunk acts as a wrapper around a virtual
11662 function, adjusting the implicit object parameter before handing
11663 control off to the real function.
11664
11665 First, emit code to add the integer DELTA to the location that
11666 contains the incoming first argument. Assume that this argument
11667 contains a pointer, and is the one used to pass the `this' pointer
11668 in C++. This is the incoming argument *before* the function
11669 prologue, e.g. `%o0' on a sparc. The addition must preserve the
11670 values of all other incoming arguments.
11671
11672 After the addition, emit code to jump to FUNCTION, which is a
11673 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
11674 not touch the return address. Hence returning from FUNCTION will
11675 return to whoever called the current `thunk'.
11676
11677 The effect must be as if FUNCTION had been called directly with the
11678 adjusted first argument. This macro is responsible for emitting
11679 all of the code for a thunk function; output_function_prologue()
11680 and output_function_epilogue() are not invoked.
11681
11682 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
11683 been extracted from it.) It might possibly be useful on some
11684 targets, but probably not.
11685
11686 If you do not define this macro, the target-independent code in the
11687 C++ frontend will generate a less efficient heavyweight thunk that
11688 calls FUNCTION instead of jumping to it. The generic approach does
11689 not support varargs. */
11690
/* Emit the assembly for a C++ multiple-inheritance thunk: adjust the
   incoming `this' pointer by DELTA and, if VCALL_OFFSET is nonzero, by
   the value found at that offset in the vtable, then tail-jump to
   FUNCTION.  See the block comment above for the full contract.  FILE
   is the assembly output stream; THUNK_FNDECL is unused.  */

static void
rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     HOST_WIDE_INT delta;
     HOST_WIDE_INT vcall_offset;
     tree function;
{
  rtx this, insn, funexp;

  /* Pretend reload has already run: we emit hard-register RTL directly
     and run final () by hand below, bypassing the normal pass pipeline.
     Both flags are restored at the end of this function.  */
  reload_completed = 1;
  no_new_pseudos = 1;

  /* Mark the end of the (empty) prologue.  */
  emit_note (NULL, NOTE_INSN_PROLOGUE_END);

  /* Find the "this" pointer.  If the function returns a structure,
     the structure return pointer is in r3, pushing "this" to r4.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
    this = gen_rtx_REG (Pmode, 4);
  else
    this = gen_rtx_REG (Pmode, 3);

  /* Apply the constant offset, if required.  */
  if (delta)
    {
      rtx delta_rtx = GEN_INT (delta);
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, delta_rtx)
		 : gen_adddi3 (this, this, delta_rtx));
    }

  /* Apply the offset from the vtable, if required.  r12 is used as a
     scratch register: tmp = *this; tmp = *(tmp + vcall_offset);
     this += tmp.  */
  if (vcall_offset)
    {
      rtx vcall_offset_rtx = GEN_INT (vcall_offset);
      rtx tmp = gen_rtx_REG (Pmode, 12);

      emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
		 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
      emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
      emit_insn (TARGET_32BIT
		 ? gen_addsi3 (this, this, tmp)
		 : gen_adddi3 (this, this, tmp));
    }

  /* Generate a tail call to the target function.  */
  if (!TREE_USED (function))
    {
      assemble_external (function);
      TREE_USED (function) = 1;
    }
  funexp = XEXP (DECL_RTL (function), 0);

  /* Mark the target as local (SYMBOL_REF_FLAG) when it is defined in
     this translation unit and not forced through a long call.  */
  SYMBOL_REF_FLAG (funexp) = 0;
  if (current_file_function_operand (funexp, VOIDmode)
      && (! lookup_attribute ("longcall",
			      TYPE_ATTRIBUTES (TREE_TYPE (function)))
	  || lookup_attribute ("shortcall",
			       TYPE_ATTRIBUTES (TREE_TYPE (function)))))
    SYMBOL_REF_FLAG (funexp) = 1;

  funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);

#if TARGET_MACHO
  if (MACHOPIC_INDIRECT)
    funexp = machopic_indirect_call_target (funexp);
#endif

  /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
     generate sibcall RTL explicitly to avoid constraint abort.  */
  insn = emit_call_insn (
	   gen_rtx_PARALLEL (VOIDmode,
	     gen_rtvec (4,
			gen_rtx_CALL (VOIDmode,
				      funexp, const0_rtx),
			gen_rtx_USE (VOIDmode, const0_rtx),
			gen_rtx_USE (VOIDmode,
				     gen_rtx_REG (SImode,
						  LINK_REGISTER_REGNUM)),
			gen_rtx_RETURN (VOIDmode))));
  SIBLING_CALL_P (insn) = 1;
  emit_barrier ();

  /* Run just enough of rest_of_compilation to get the insns emitted.
     There's not really enough bulk here to make other passes such as
     instruction scheduling worthwhile.  Note that use_thunk calls
     assemble_start_function and assemble_end_function.  */
  insn = get_insns ();
  shorten_branches (insn);
  final_start_function (insn, file, 1);
  final (insn, file, 1, 0);
  final_end_function ();

  /* Restore the global state toggled at entry.  */
  reload_completed = 0;
  no_new_pseudos = 0;
}
11790 \f
11791 /* A quick summary of the various types of 'constant-pool tables'
11792 under PowerPC:
11793
11794 Target Flags Name One table per
11795 AIX (none) AIX TOC object file
11796 AIX -mfull-toc AIX TOC object file
11797 AIX -mminimal-toc AIX minimal TOC translation unit
11798 SVR4/EABI (none) SVR4 SDATA object file
11799 SVR4/EABI -fpic SVR4 pic object file
11800 SVR4/EABI -fPIC SVR4 PIC translation unit
11801 SVR4/EABI -mrelocatable EABI TOC function
11802 SVR4/EABI -maix AIX TOC object file
11803 SVR4/EABI -maix -mminimal-toc
11804 AIX minimal TOC translation unit
11805
11806 Name Reg. Set by entries contains:
11807 made by addrs? fp? sum?
11808
11809 AIX TOC 2 crt0 as Y option option
11810 AIX minimal TOC 30 prolog gcc Y Y option
11811 SVR4 SDATA 13 crt0 gcc N Y N
11812 SVR4 pic 30 prolog ld Y not yet N
11813 SVR4 PIC 30 prolog gcc Y option option
11814 EABI TOC 30 prolog gcc Y option option
11815
11816 */
11817
11818 /* Hash functions for the hash table. */
11819
/* Compute a hash value for RTL constant K, for the TOC hash table.
   The hash seeds on K's rtx code and machine mode, then folds in each
   operand according to its format letter, recursing into
   subexpressions.  */

static unsigned
rs6000_hash_constant (k)
     rtx k;
{
  enum rtx_code code = GET_CODE (k);
  enum machine_mode mode = GET_MODE (k);
  unsigned result = (code << 3) ^ mode;
  const char *format;
  int flen, fidx;

  format = GET_RTX_FORMAT (code);
  flen = strlen (format);
  fidx = 0;

  switch (code)
    {
    case LABEL_REF:
      /* Hash on the UID of the referenced label instead of walking
	 into the CODE_LABEL operand.  */
      return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));

    case CONST_DOUBLE:
      if (mode != VOIDmode)
	return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
      /* Integral CONST_DOUBLE: only the two value words below are
	 significant, so restrict the operand walk to them.  */
      flen = 2;
      break;

    case CODE_LABEL:
      /* Skip the chain/UID operands; start hashing at operand 3.  */
      fidx = 3;
      break;

    default:
      break;
    }

  /* Fold each remaining operand into the hash according to its
     format-string letter.  */
  for (; fidx < flen; fidx++)
    switch (format[fidx])
      {
      case 's':
	{
	  unsigned i, len;
	  const char *str = XSTR (k, fidx);
	  len = strlen (str);
	  result = result * 613 + len;
	  for (i = 0; i < len; i++)
	    result = result * 613 + (unsigned) str[i];
	  break;
	}
      case 'u':
      case 'e':
	result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
	break;
      case 'i':
      case 'n':
	result = result * 613 + (unsigned) XINT (k, fidx);
	break;
      case 'w':
	if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
	  result = result * 613 + (unsigned) XWINT (k, fidx);
	else
	  {
	    size_t i;
	    /* NOTE(review): the shift advances by CHAR_BIT per
	       iteration, so with a 64-bit HOST_WIDE_INT and 32-bit
	       unsigned only the low 40 bits influence the hash.
	       Harmless for hash quality, but possibly meant to be
	       CHAR_BIT * sizeof (unsigned) * i -- confirm.  */
	    for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
	      result = result * 613 + (unsigned) (XWINT (k, fidx)
						  >> CHAR_BIT * i);
	  }
	break;
      default:
	abort ();
      }

  return result;
}
11891
11892 static unsigned
11893 toc_hash_function (hash_entry)
11894 const void * hash_entry;
11895 {
11896 const struct toc_hash_struct *thc =
11897 (const struct toc_hash_struct *) hash_entry;
11898 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
11899 }
11900
11901 /* Compare H1 and H2 for equivalence. */
11902
11903 static int
11904 toc_hash_eq (h1, h2)
11905 const void * h1;
11906 const void * h2;
11907 {
11908 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
11909 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
11910
11911 if (((const struct toc_hash_struct *) h1)->key_mode
11912 != ((const struct toc_hash_struct *) h2)->key_mode)
11913 return 0;
11914
11915 return rtx_equal_p (r1, r2);
11916 }
11917
11918 /* These are the names given by the C++ front-end to vtables, and
11919 vtable-like objects. Ideally, this logic should not be here;
11920 instead, there should be some programmatic way of inquiring as
11921 to whether or not an object is a vtable. */
11922
/* The argument is evaluated several times, so pass a simple lvalue.
   (Previously the body referenced the free variable `name' rather than
   the parameter, which only worked where a local named `name' was in
   scope; both call sites pass `name', so the expansion is unchanged
   there.)  */
#define VTABLE_NAME_P(NAME) \
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	  \
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	  \
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	  \
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
11928
11929 void
11930 rs6000_output_symbol_ref (file, x)
11931 FILE *file;
11932 rtx x;
11933 {
11934 /* Currently C++ toc references to vtables can be emitted before it
11935 is decided whether the vtable is public or private. If this is
11936 the case, then the linker will eventually complain that there is
11937 a reference to an unknown section. Thus, for vtables only,
11938 we emit the TOC reference to reference the symbol and not the
11939 section. */
11940 const char *name = XSTR (x, 0);
11941
11942 if (VTABLE_NAME_P (name))
11943 {
11944 RS6000_OUTPUT_BASENAME (file, name);
11945 }
11946 else
11947 assemble_name (file, name);
11948 }
11949
/* Output a TOC entry for constant X, previously assigned label number
   LABELNO, with machine mode MODE.  We derive the entry name from what
   is being written.  Handles duplicate merging, FP constants, integer
   constants, and symbolic addresses (with optional offsets).  */

void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  Don't do this for
     CODE_LABELs.  */
  if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
    {
      struct toc_hash_struct *h;
      void * * found;

      /* Create toc_hash_table.  This can't be done at OVERRIDE_OPTIONS
	 time because GGC is not initialized at that point.  */
      if (toc_hash_table == NULL)
	toc_hash_table = htab_create_ggc (1021, toc_hash_function,
					  toc_hash_eq, NULL);

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
				   found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  (*targetm.asm_out.internal_label) (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  In each case below: with a minimal TOC the value is
     emitted as raw data; otherwise a named .tc entry is emitted.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
    {
      REAL_VALUE_TYPE rv;
      long k[4];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff,
		     k[2] & 0xffffffff, k[3] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff,
		   k[2] & 0xffffffff, k[3] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],",
		     k[0] & 0xffffffff, k[1] & 0xffffffff);
	  fprintf (file, "0x%lx,0x%lx\n",
		   k[0] & 0xffffffff, k[1] & 0xffffffff);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  /* The single-precision value occupies the high half of the
	     64-bit TOC slot.  */
	  fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
	  fprintf (file, "0x%lx\n", l & 0xffffffff);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      /* Split the integer constant into two 32-bit halves.  */
      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
	  low = INTVAL (x) & 0xffffffff;
	  high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	{
	  /* Left-justify the value within the Pmode-sized slot.  */
#if HOST_BITS_PER_WIDE_INT == 32
	  lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
			 POINTER_SIZE, &low, &high, 0);
#else
	  low |= high << 32;
	  low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
	  high = (HOST_WIDE_INT) low >> 32;
	  low &= 0xffffffff;
#endif
	}

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],",
		     (long) high & 0xffffffff, (long) low & 0xffffffff);
	  fprintf (file, "0x%lx%08lx\n",
		   (long) high & 0xffffffff, (long) low & 0xffffffff);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high & 0xffffffff, (long) low & 0xffffffff);
	      fprintf (file, "0x%lx,0x%lx\n",
		       (long) high & 0xffffffff, (long) low & 0xffffffff);
	    }
	  else
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
	      fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
	    }
	  return;
	}
    }

  /* Otherwise X is a symbolic address, possibly plus a constant
     offset (SYMBOL_REF, LABEL_REF, CODE_LABEL, or CONST of PLUS).  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }

  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  real_name = (*targetm.strip_name_encoding) (name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      fprintf (file, "\t.tc %s", real_name);

      /* Encode a nonzero offset into the .tc entry name: ".N" for
	 negative offsets, ".P" for positive ones.  */
      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
12248 \f
12249 /* Output an assembler pseudo-op to write an ASCII string of N characters
12250 starting at P to FILE.
12251
12252 On the RS/6000, we have to do this using the .byte operation and
12253 write out special characters outside the quoted string.
12254 Also, the assembler is broken; very long strings are truncated,
12255 so we must artificially break them up early. */
12256
void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  char ch;
  int idx, in_string_count;
  const char *open_string = "\t.byte \"";
  const char *open_decimal = "\t.byte ";
  const char *closer = NULL;

  in_string_count = 0;
  for (idx = 0; idx < n; idx++)
    {
      ch = *p++;

      if (ch >= ' ' && ch < 0177)
	{
	  /* Printable character: emit it inside a quoted string,
	     opening one if necessary.  */
	  if (open_string)
	    fputs (open_string, file);
	  putc (ch, file);

	  /* A quote inside the string is escaped by doubling it.  */
	  if (ch == '"')
	    {
	      putc (ch, file);
	      ++in_string_count;
	    }

	  open_string = NULL;
	  open_decimal = "\"\n\t.byte ";
	  closer = "\"\n";
	  ++in_string_count;

	  /* The assembler truncates very long strings, so break them
	     up well before that limit.  */
	  if (in_string_count >= 512)
	    {
	      fputs (closer, file);

	      open_string = "\t.byte \"";
	      open_decimal = "\t.byte ";
	      closer = NULL;
	      in_string_count = 0;
	    }
	}
      else
	{
	  /* Non-printable character: emit it as a decimal .byte
	     value, closing any open string first.  */
	  if (open_decimal)
	    fputs (open_decimal, file);
	  fprintf (file, "%d", ch);

	  open_string = "\n\t.byte \"";
	  open_decimal = ", ";
	  closer = "\n";
	  in_string_count = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (closer)
    fputs (closer, file);
}
12318 \f
12319 /* Generate a unique section name for FILENAME for a section type
12320 represented by SECTION_DESC. Output goes into BUF.
12321
12322 SECTION_DESC can be any string, as long as it is different for each
12323 possible section type.
12324
12325 We name the section in the same manner as xlc. The name begins with an
12326 underscore followed by the filename (after stripping any leading directory
12327 names) with the last period replaced by the string SECTION_DESC. If
12328 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12329 the name. */
12330
/* Generate a unique section name for FILENAME into *BUF (see the
   comment above for the naming scheme; *BUF is xmalloc'd and owned by
   the caller).  Fix: only periods within the basename are considered
   when locating the "last period" -- previously a period appearing
   only in a directory component (e.g. "dir.d/foo") caused SECTION_DESC
   to be dropped from the generated name entirely.  */

void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *q, *after_last_slash, *last_period = 0;
  char *p;
  int len;

  /* Locate the basename and the last period within it.  Reset
     last_period at each slash so periods in directory names don't
     count.  */
  after_last_slash = filename;
  for (q = filename; *q; q++)
    {
      if (*q == '/')
	{
	  after_last_slash = q + 1;
	  last_period = 0;
	}
      else if (*q == '.')
	last_period = q;
    }

  /* '_' + stripped basename + SECTION_DESC + terminating NUL.  */
  len = strlen (after_last_slash) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  p = *buf;
  *p++ = '_';

  for (q = after_last_slash; *q; q++)
    {
      if (q == last_period)
	{
	  /* Replace the final period (and anything after it) with
	     SECTION_DESC.  */
	  strcpy (p, section_desc);
	  p += strlen (section_desc);
	  break;
	}

      else if (ISALNUM (*q))
	*p++ = *q;
    }

  /* No period in the basename: append SECTION_DESC at the end.  */
  if (last_period == 0)
    strcpy (p, section_desc);
  else
    *p = '\0';
}
12374 \f
/* Emit profile function.  For the AIX and Darwin ABIs this emits an
   RTL library call to the mcount routine (RS6000_MCOUNT); the V4 and
   AIX_NODESC ABIs are handled as literal assembly text in
   output_function_profiler instead.  LABELNO numbers this function's
   profile counter label ("LP<n>").  */

void
output_profile_hook (labelno)
     int labelno ATTRIBUTE_UNUSED;
{
  /* Kernel profiling is emitted by output_function_profiler, not
     here.  */
  if (TARGET_PROFILE_KERNEL)
    return;

  if (DEFAULT_ABI == ABI_AIX)
    {
#ifdef NO_PROFILE_COUNTERS
      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
#else
      char buf[30];
      const char *label_name;
      rtx fun;

      /* Pass the address of the per-function counter label to
	 mcount.  */
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
      fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
			 fun, Pmode);
#endif
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (MACHOPIC_INDIRECT)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
12424
/* Write function profiler code.  Emits assembly text that calls the
   mcount routine: for ABI_V4 / ABI_AIX_NODESC (32-bit only) this is
   the whole profiling sequence; for ABI_AIX / ABI_DARWIN only the
   TARGET_PROFILE_KERNEL variant is emitted here, the ordinary case
   being handled in output_profile_hook.  LABELNO numbers this
   function's profile counter label ("LP<n>").  */

void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];
  int save_lr = 8;

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
      /* V4 stores the saved LR at offset 4 rather than 8.  */
      save_lr = 4;
      /* Fall through.  */

    case ABI_AIX_NODESC:
      if (!TARGET_32BIT)
	{
	  warning ("no profiling of 64-bit code for this ABI");
	  return;
	}
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* -fpic: load the counter label's address via the GOT.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: materialize the label address directly with
	     lis/la.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[0], save_lr, reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      if (current_function_needs_context && DEFAULT_ABI == ABI_AIX_NODESC)
	{
	  /* Preserve the static chain register across the mcount
	     call.  */
	  asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	  fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	  asm_fprintf (file, "\t{l|lwz} %s,%d(%s)\n",
		       reg_names[STATIC_CHAIN_REGNUM],
		       12, reg_names[1]);
	}
      else
	/* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH.  */
	fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      if (!TARGET_PROFILE_KERNEL)
	{
	  /* Don't do anything, done in output_profile_hook ().  */
	}
      else
	{
	  /* Kernel profiling here is 64-bit only.  */
	  if (TARGET_32BIT)
	    abort ();

	  asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
	  asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);

	  if (current_function_needs_context)
	    {
	      /* Preserve the static chain register across the mcount
		 call.  */
	      asm_fprintf (file, "\tstd %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	      asm_fprintf (file, "\tld %s,24(%s)\n",
			   reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
	    }
	  else
	    fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
	}
      break;
    }
}
12531
12532 \f
/* Target scheduler hook: return nonzero so the scheduler uses the DFA
   pipeline interface.  Unconditional for this port.  */

static int
rs6000_use_dfa_pipeline_interface ()
{
  return 1;
}
12538
12539 /* Power4 load update and store update instructions are cracked into a
12540 load or store and an integer insn which are executed in the same cycle.
12541 Branches have their own dispatch slot which does not count against the
12542 GCC issue rate, but it changes the program flow so there are no other
12543 instructions to issue in this cycle. */
12544
12545 static int
12546 rs6000_variable_issue (stream, verbose, insn, more)
12547 FILE *stream ATTRIBUTE_UNUSED;
12548 int verbose ATTRIBUTE_UNUSED;
12549 rtx insn;
12550 int more;
12551 {
12552 if (GET_CODE (PATTERN (insn)) == USE
12553 || GET_CODE (PATTERN (insn)) == CLOBBER)
12554 return more;
12555
12556 if (rs6000_cpu == PROCESSOR_POWER4)
12557 {
12558 enum attr_type type = get_attr_type (insn);
12559 if (type == TYPE_LOAD_EXT_U || type == TYPE_LOAD_EXT_UX
12560 || type == TYPE_LOAD_UX || type == TYPE_STORE_UX)
12561 return 0;
12562 else if (type == TYPE_LOAD_U || type == TYPE_STORE_U
12563 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
12564 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
12565 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
12566 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
12567 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
12568 || type == TYPE_IDIV || type == TYPE_LDIV)
12569 return more > 2 ? more - 2 : 0;
12570 }
12571
12572 return more - 1;
12573 }
12574
12575 /* Adjust the cost of a scheduling dependency. Return the new cost of
12576 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
12577
12578 static int
12579 rs6000_adjust_cost (insn, link, dep_insn, cost)
12580 rtx insn;
12581 rtx link;
12582 rtx dep_insn ATTRIBUTE_UNUSED;
12583 int cost;
12584 {
12585 if (! recog_memoized (insn))
12586 return 0;
12587
12588 if (REG_NOTE_KIND (link) != 0)
12589 return 0;
12590
12591 if (REG_NOTE_KIND (link) == 0)
12592 {
12593 /* Data dependency; DEP_INSN writes a register that INSN reads
12594 some cycles later. */
12595 switch (get_attr_type (insn))
12596 {
12597 case TYPE_JMPREG:
12598 /* Tell the first scheduling pass about the latency between
12599 a mtctr and bctr (and mtlr and br/blr). The first
12600 scheduling pass will not know about this latency since
12601 the mtctr instruction, which has the latency associated
12602 to it, will be generated by reload. */
12603 return TARGET_POWER ? 5 : 4;
12604 case TYPE_BRANCH:
12605 /* Leave some extra cycles between a compare and its
12606 dependent branch, to inhibit expensive mispredicts. */
12607 if ((rs6000_cpu_attr == CPU_PPC603
12608 || rs6000_cpu_attr == CPU_PPC604
12609 || rs6000_cpu_attr == CPU_PPC604E
12610 || rs6000_cpu_attr == CPU_PPC620
12611 || rs6000_cpu_attr == CPU_PPC630
12612 || rs6000_cpu_attr == CPU_PPC750
12613 || rs6000_cpu_attr == CPU_PPC7400
12614 || rs6000_cpu_attr == CPU_PPC7450
12615 || rs6000_cpu_attr == CPU_POWER4)
12616 && recog_memoized (dep_insn)
12617 && (INSN_CODE (dep_insn) >= 0)
12618 && (get_attr_type (dep_insn) == TYPE_CMP
12619 || get_attr_type (dep_insn) == TYPE_COMPARE
12620 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
12621 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
12622 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
12623 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
12624 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
12625 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
12626 return cost + 2;
12627 default:
12628 break;
12629 }
12630 /* Fall out to return default cost. */
12631 }
12632
12633 return cost;
12634 }
12635
12636 /* A C statement (sans semicolon) to update the integer scheduling
12637 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
12638 INSN earlier, increase the priority to execute INSN later. Do not
12639 define this macro if you do not need to adjust the scheduling
12640 priorities of insns. */
12641
static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

  /* The experiment below is disabled (#if 0); the hook currently
     returns PRIORITY unchanged.  Note the debugging fprintf to stderr
     inside — it would have to go before re-enabling this.  */
#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  return priority;
}
12679
12680 /* Return how many instructions the machine can issue per cycle. */
12681
12682 static int
12683 rs6000_issue_rate ()
12684 {
12685 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
12686 if (!reload_completed)
12687 return 1;
12688
12689 switch (rs6000_cpu_attr) {
12690 case CPU_RIOS1: /* ? */
12691 case CPU_RS64A:
12692 case CPU_PPC601: /* ? */
12693 case CPU_PPC7450:
12694 return 3;
12695 case CPU_PPC440:
12696 case CPU_PPC603:
12697 case CPU_PPC750:
12698 case CPU_PPC7400:
12699 case CPU_PPC8540:
12700 return 2;
12701 case CPU_RIOS2:
12702 case CPU_PPC604:
12703 case CPU_PPC604E:
12704 case CPU_PPC620:
12705 case CPU_PPC630:
12706 case CPU_POWER4:
12707 return 4;
12708 default:
12709 return 1;
12710 }
12711 }
12712
12713 /* Return how many instructions to look ahead for better insn
12714 scheduling. */
12715
12716 static int
12717 rs6000_use_sched_lookahead ()
12718 {
12719 if (rs6000_cpu_attr == CPU_PPC8540)
12720 return 4;
12721 return 0;
12722 }
12723
12724 \f
12725 /* Length in units of the trampoline for entering a nested function. */
12726
12727 int
12728 rs6000_trampoline_size ()
12729 {
12730 int ret = 0;
12731
12732 switch (DEFAULT_ABI)
12733 {
12734 default:
12735 abort ();
12736
12737 case ABI_AIX:
12738 ret = (TARGET_32BIT) ? 12 : 24;
12739 break;
12740
12741 case ABI_DARWIN:
12742 case ABI_V4:
12743 case ABI_AIX_NODESC:
12744 ret = (TARGET_32BIT) ? 40 : 48;
12745 break;
12746 }
12747
12748 return ret;
12749 }
12750
12751 /* Emit RTL insns to initialize the variable parts of a trampoline.
12752 FNADDR is an RTX for the address of the function's pure code.
12753 CXT is an RTX for the static chain value for the function. */
12754
void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  /* Width of one pointer slot in the trampoline.  */
  int regsize = (TARGET_32BIT) ? 4 : 8;
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	/* Copy the target function's entry point and TOC pointer from
	   its descriptor, then store entry, TOC, and static chain into
	   the trampoline's own descriptor slots.  */
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
    case ABI_AIX_NODESC:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
12803
12804 \f
12805 /* Table of valid machine attributes. */
12806
const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  /* Both attributes take no arguments, require a function type, and
     share a handler that merely warns about misplaced uses.  */
  { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};
12814
12815 /* Handle a "longcall" or "shortcall" attribute; arguments as in
12816 struct attribute_spec.handler. */
12817
12818 static tree
12819 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
12820 tree *node;
12821 tree name;
12822 tree args ATTRIBUTE_UNUSED;
12823 int flags ATTRIBUTE_UNUSED;
12824 bool *no_add_attrs;
12825 {
12826 if (TREE_CODE (*node) != FUNCTION_TYPE
12827 && TREE_CODE (*node) != FIELD_DECL
12828 && TREE_CODE (*node) != TYPE_DECL)
12829 {
12830 warning ("`%s' attribute only applies to functions",
12831 IDENTIFIER_POINTER (name));
12832 *no_add_attrs = true;
12833 }
12834
12835 return NULL_TREE;
12836 }
12837
12838 /* Set longcall attributes on all functions declared when
12839 rs6000_default_long_calls is true. */
12840 static void
12841 rs6000_set_default_type_attributes (type)
12842 tree type;
12843 {
12844 if (rs6000_default_long_calls
12845 && (TREE_CODE (type) == FUNCTION_TYPE
12846 || TREE_CODE (type) == METHOD_TYPE))
12847 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
12848 NULL_TREE,
12849 TYPE_ATTRIBUTES (type));
12850 }
12851
12852 /* Return a reference suitable for calling a function with the
12853 longcall attribute. */
12854
12855 struct rtx_def *
12856 rs6000_longcall_ref (call_ref)
12857 rtx call_ref;
12858 {
12859 const char *call_name;
12860 tree node;
12861
12862 if (GET_CODE (call_ref) != SYMBOL_REF)
12863 return call_ref;
12864
12865 /* System V adds '.' to the internal name, so skip them. */
12866 call_name = XSTR (call_ref, 0);
12867 if (*call_name == '.')
12868 {
12869 while (*call_name == '.')
12870 call_name++;
12871
12872 node = get_identifier (call_name);
12873 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
12874 }
12875
12876 return force_reg (Pmode, call_ref);
12877 }
12878
12879 \f
12880 #ifdef USING_ELFOS_H
12881
12882 /* A C statement or statements to switch to the appropriate section
12883 for output of RTX in mode MODE. You can assume that RTX is some
12884 kind of constant in RTL. The argument MODE is redundant except in
12885 the case of a `const_int' rtx. Select the section by calling
12886 `text_section' or one of the alternatives for other sections.
12887
12888 Do not define this macro if you put all constants in the read-only
12889 data section. */
12890
12891 static void
12892 rs6000_elf_select_rtx_section (mode, x, align)
12893 enum machine_mode mode;
12894 rtx x;
12895 unsigned HOST_WIDE_INT align;
12896 {
12897 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
12898 toc_section ();
12899 else
12900 default_elf_select_rtx_section (mode, x, align);
12901 }
12902
12903 /* A C statement or statements to switch to the appropriate
12904 section for output of DECL. DECL is either a `VAR_DECL' node
12905 or a constant of some sort. RELOC indicates whether forming
12906 the initial value of DECL requires link-time relocations. */
12907
12908 static void
12909 rs6000_elf_select_section (decl, reloc, align)
12910 tree decl;
12911 int reloc;
12912 unsigned HOST_WIDE_INT align;
12913 {
12914 /* Pretend that we're always building for a shared library when
12915 ABI_AIX, because otherwise we end up with dynamic relocations
12916 in read-only sections. This happens for function pointers,
12917 references to vtables in typeinfo, and probably other cases. */
12918 default_elf_select_section_1 (decl, reloc, align,
12919 flag_pic || DEFAULT_ABI == ABI_AIX);
12920 }
12921
12922 /* A C statement to build up a unique section name, expressed as a
12923 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
12924 RELOC indicates whether the initial value of EXP requires
12925 link-time relocations. If you do not define this macro, GCC will use
12926 the symbol name prefixed by `.' as the section name. Note - this
12927 macro can now be called for uninitialized data items as well as
12928 initialized data and functions. */
12929
12930 static void
12931 rs6000_elf_unique_section (decl, reloc)
12932 tree decl;
12933 int reloc;
12934 {
12935 /* As above, pretend that we're always building for a shared library
12936 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
12937 default_unique_section_1 (decl, reloc,
12938 flag_pic || DEFAULT_ABI == ABI_AIX);
12939 }
12940 \f
12941 /* If we are referencing a function that is static or is known to be
12942 in this file, make the SYMBOL_REF special. We can use this to indicate
12943 that we can branch to this function without emitting a no-op after the
12944 call. For real AIX calling sequences, we also replace the
12945 function name with the real name (1 or 2 leading .'s), rather than
12946 the function descriptor name. This saves a lot of overriding code
12947 to read the prefixes. */
12948
static void
rs6000_elf_encode_section_info (decl, first)
     tree decl;
     int first;
{
  /* Encoding is only done on the first call for each decl.  */
  if (!first)
    return;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
      /* Locally-bound functions can be branched to directly.  */
      if ((*targetm.binds_local_p) (decl))
	SYMBOL_REF_FLAG (sym_ref) = 1;

      /* For the AIX ABI on a non-AIX assembler, prefix the function
	 symbol with dots (see the comment above this function).
	 NOTE(review): len1 is always 1 here since the enclosing test
	 already requires DEFAULT_ABI == ABI_AIX, so the second '.'
	 written into str[1] is always overwritten by the memcpy.  */
      if (!TARGET_AIX && DEFAULT_ABI == ABI_AIX)
	{
	  size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
	  size_t len2 = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len1 + len2 + 1);
	  str[0] = '.';
	  str[1] = '.';
	  memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);

	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
	}
    }
  else if (rs6000_sdata != SDATA_NONE
	   && DEFAULT_ABI == ABI_V4
	   && TREE_CODE (decl) == VAR_DECL)
    {
      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
      int size = int_size_in_bytes (TREE_TYPE (decl));
      tree section_name = DECL_SECTION_NAME (decl);
      const char *name = (char *)0;
      int len = 0;

      if ((*targetm.binds_local_p) (decl))
	SYMBOL_REF_FLAG (sym_ref) = 1;

      /* An explicit section name must be a STRING_CST.  */
      if (section_name)
	{
	  if (TREE_CODE (section_name) == STRING_CST)
	    {
	      name = TREE_STRING_POINTER (section_name);
	      len = TREE_STRING_LENGTH (section_name);
	    }
	  else
	    abort ();
	}

      /* A variable is small data if it was placed in one of the known
	 small-data sections by name, or (with no explicit section) if
	 its size is within the -G threshold.  */
      if (name
	  ? ((len == sizeof (".sdata") - 1
	      && strcmp (name, ".sdata") == 0)
	     || (len == sizeof (".sdata2") - 1
		 && strcmp (name, ".sdata2") == 0)
	     || (len == sizeof (".sbss") - 1
		 && strcmp (name, ".sbss") == 0)
	     || (len == sizeof (".sbss2") - 1
		 && strcmp (name, ".sbss2") == 0)
	     || (len == sizeof (".PPC.EMB.sdata0") - 1
		 && strcmp (name, ".PPC.EMB.sdata0") == 0)
	     || (len == sizeof (".PPC.EMB.sbss0") - 1
		 && strcmp (name, ".PPC.EMB.sbss0") == 0))
	  : (size > 0 && size <= g_switch_value))
	{
	  /* Mark small-data symbols with a '@' prefix; it is stripped
	     again by rs6000_elf_strip_name_encoding.  */
	  size_t len = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len + 2);

	  str[0] = '@';
	  memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
	}
    }
}
13023
static const char *
rs6000_elf_strip_name_encoding (str)
     const char *str;
{
  /* Drop any leading '*' (user label) and '@' (small data) encoding
     characters and return the remainder of the name.  */
  for (; *str == '*' || *str == '@'; str++)
    ;
  return str;
}
13032
/* Return true if DECL should live in one of the small-data sections.  */

static bool
rs6000_elf_in_small_data_p (decl)
     tree decl;
{
  if (rs6000_sdata == SDATA_NONE)
    return false;

  /* A variable with an explicit section name is small data only when
     that section is one of the known small-data sections; any other
     named section falls through to the final "return false".  */
  if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
    {
      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
      if (strcmp (section, ".sdata") == 0
	  || strcmp (section, ".sdata2") == 0
	  || strcmp (section, ".sbss") == 0)
	return true;
    }
  else
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));

      /* Otherwise: small enough for -G, and in SDATA_DATA mode only
	 public decls qualify.  */
      if (size > 0
	  && size <= g_switch_value
	  && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
	return true;
    }

  return false;
}
13060
13061 #endif /* USING_ELFOS_H */
13062
13063 \f
13064 /* Return a REG that occurs in ADDR with coefficient 1.
13065 ADDR can be effectively incremented by incrementing REG.
13066
13067 r0 is special and we must not select it as an address
13068 register by this routine since our caller will try to
13069 increment the returned register via an "la" instruction. */
13070
struct rtx_def *
find_addr_reg (addr)
     rtx addr;
{
  /* Walk down nested PLUS expressions, following a non-r0 register
     operand and discarding constant operands.  r0 is never chosen
     because the caller increments the result with "la", where r0
     reads as the literal value 0.  */
  while (GET_CODE (addr) == PLUS)
    {
      if (GET_CODE (XEXP (addr, 0)) == REG
	  && REGNO (XEXP (addr, 0)) != 0)
	addr = XEXP (addr, 0);
      else if (GET_CODE (XEXP (addr, 1)) == REG
	       && REGNO (XEXP (addr, 1)) != 0)
	addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 0)))
	addr = XEXP (addr, 1);
      else if (CONSTANT_P (XEXP (addr, 1)))
	addr = XEXP (addr, 0);
      else
	abort ();
    }
  /* The walk must end at a register other than r0.  */
  if (GET_CODE (addr) == REG && REGNO (addr) != 0)
    return addr;
  abort ();
}
13094
/* Report OP as an invalid address and abort compilation, dumping the
   offending insn.  */

void
rs6000_fatal_bad_address (op)
     rtx op;
{
  fatal_insn ("bad address", op);
}
13101
13102 #if TARGET_MACHO
13103
13104 #if 0
13105 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
13106 reference and a constant. */
13107
int
symbolic_operand (op)
     rtx op;
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      op = XEXP (op, 0);
      /* NOTE(review): this function is disabled (#if 0).  The return
	 expression relies on '&&' binding tighter than '||', and when
	 OP is not itself a SYMBOL_REF it dereferences XEXP (op, 0/1)
	 without first checking that OP is a PLUS — confirm both points
	 before re-enabling.  */
      return (GET_CODE (op) == SYMBOL_REF ||
	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
    default:
      return 0;
    }
}
13127 #endif
13128
13129 #ifdef RS6000_LONG_BRANCH
13130
/* Chain (via TREE_CHAIN) of pending long-branch stubs, recorded by
   add_compiler_stub and flushed by output_compiler_stub.  */
static tree stub_list = 0;
13132
13133 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
13134 procedure calls to the linked list. */
13135
13136 void
13137 add_compiler_stub (label_name, function_name, line_number)
13138 tree label_name;
13139 tree function_name;
13140 int line_number;
13141 {
13142 tree stub = build_tree_list (function_name, label_name);
13143 TREE_TYPE (stub) = build_int_2 (line_number, 0);
13144 TREE_CHAIN (stub) = stub_list;
13145 stub_list = stub;
13146 }
13147
/* Accessors for the stub_list entries built by add_compiler_stub:
   stub label, target function name, and source line number.  */
#define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
#define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
#define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
13151
13152 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
13153 handling procedure calls from the linked list and initializes the
13154 linked list. */
13155
void
output_compiler_stub ()
{
  char tmp_buf[256];
  char label_buf[256];
  tree stub;

  /* Stubs are only emitted for non-PIC code; PIC calls use the
     machopic stub mechanism instead.  */
  if (!flag_pic)
    for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
      {
	fprintf (asm_out_file,
		 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */

	/* '*'-prefixed names are emitted verbatim (minus the '*');
	   all others get the '_' user-label prefix.
	   NOTE(review): the 256-byte label_buf/tmp_buf are not bounds
	   checked against long symbol names — confirm upstream name
	   length limits before relying on this.  */
	if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
	  strcpy (label_buf,
		  IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
	else
	  {
	    label_buf[0] = '_';
	    strcpy (label_buf+1,
		    IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
	  }

	/* Load the target address into r12 with lis/ori, then branch
	   through the count register.  */
	strcpy (tmp_buf, "lis r12,hi16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
	strcat (tmp_buf, label_buf);
	strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
	output_asm_insn (tmp_buf, 0);

#if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
	if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
	  fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
      }

  /* All pending stubs are consumed; reset for the next function.  */
  stub_list = 0;
}
13199
13200 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
13201 already there or not. */
13202
13203 int
13204 no_previous_def (function_name)
13205 tree function_name;
13206 {
13207 tree stub;
13208 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13209 if (function_name == STUB_FUNCTION_NAME (stub))
13210 return 0;
13211 return 1;
13212 }
13213
13214 /* GET_PREV_LABEL gets the label name from the previous definition of
13215 the function. */
13216
13217 tree
13218 get_prev_label (function_name)
13219 tree function_name;
13220 {
13221 tree stub;
13222 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13223 if (function_name == STUB_FUNCTION_NAME (stub))
13224 return STUB_LABEL_NAME (stub);
13225 return 0;
13226 }
13227
13228 /* INSN is either a function call or a millicode call. It may have an
13229 unconditional jump in its delay slot.
13230
13231 CALL_DEST is the routine we are calling. */
13232
char *
output_call (insn, call_dest, operand_number)
     rtx insn;
     rtx call_dest;
     int operand_number;
{
  /* Returned template is built in static storage; the caller must use
     it before the next call.  */
  static char buf[256];
  if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
    {
      tree labelname;
      tree funname = get_identifier (XSTR (call_dest, 0));

      /* First call to this target: invent a stub label and record the
	 stub for later emission by output_compiler_stub.  */
      if (no_previous_def (funname))
	{
	  int line_number = 0;
	  rtx label_rtx = gen_label_rtx ();
	  char *label_buf, temp_buf[256];
	  ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
				       CODE_LABEL_NUMBER (label_rtx));
	  label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
	  labelname = get_identifier (label_buf);
	  /* Walk back to the nearest NOTE to pick up a line number for
	     the stub's debug stab; 0 if none is found.  */
	  for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
	  if (insn)
	    line_number = NOTE_LINE_NUMBER (insn);
	  add_compiler_stub (labelname, funname, line_number);
	}
      else
	labelname = get_prev_label (funname);

      /* %.246s bounds the label so the whole template fits in buf.  */
      sprintf (buf, "jbsr %%z%d,%.246s",
	       operand_number, IDENTIFIER_POINTER (labelname));
      return buf;
    }
  else
    {
      /* Short-range case: a plain bl reaches the target directly.  */
      sprintf (buf, "bl %%z%d", operand_number);
      return buf;
    }
}
13272
13273 #endif /* RS6000_LONG_BRANCH */
13274
/* Write into BUF the stub-local label "L<N>$<SYMBOL>", preserving or
   adding quotes as the symbol's spelling requires.  BUF must be large
   enough (callers allocate strlen (SYMBOL) + 32).  LENGTH is unused.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
	sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
	sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
	sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
13292
13293
13294 /* Generate PIC and indirect symbol stubs. */
13295
void
machopic_output_stub (file, symb, stub)
     FILE *file;
     const char *symb, *stub;
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  /* Monotonic counter making each stub's local label unique.  */
  static int label = 0;

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  symb = (*targetm.strip_name_encoding) (symb);

  label += 1;

  /* Derive the three names used below: the symbol itself, its lazy
     pointer, and a local label for PIC address arithmetic.  */
  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  local_label_0 = alloca (length + 32);
  GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);

  if (flag_pic == 2)
    machopic_picsymbol_stub1_section ();
  else
    machopic_symbol_stub1_section ();
  fprintf (file, "\t.align 2\n");

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* PIC stub: compute the lazy pointer's address relative to the
	 local label (bcl/mflr), load it into r12 with lwzu, and branch
	 through the count register.  */
      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }
  else
    {
      /* Non-PIC stub: the lazy pointer's absolute address is known.  */
      fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
      fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\tbctr\n");
    }

  /* Emit the lazy pointer itself, initially pointing at dyld's
     binding helper so the first call resolves the symbol.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
13356
13357 /* Legitimize PIC addresses. If the address is already
13358 position-independent, we return ORIG. Newly generated
13359 position-independent addresses go into a reg. This is REG if non
13360 zero, otherwise we allocate register(s) as necessary. */
13361
/* True iff X fits in a signed 16-bit immediate field.  */
#define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)

rtx
rs6000_machopic_legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  rtx base, offset;

  /* New pseudos may only be created before reload.  */
  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* Already PIC: (const (plus pic_offset_table ...)).  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      /* Legitimize both operands of a (const (plus ...)) recursively.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* During/after reload no new pseudos are allowed, so
		 spill the whole constant to memory instead.  */
 	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
13411
13412 /* This is just a placeholder to make linking work without having to
13413 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13414 ever needed for Darwin (not too likely!) this would have to get a
13415 real definition. */
13416
void
toc_section ()
{
  /* Deliberately empty: Darwin has no TOC, but shared code references
     toc_section, so a no-op definition is needed for linking (see the
     comment above).  */
}
13421
13422 #endif /* TARGET_MACHO */
13423
13424 #if TARGET_ELF
13425 static unsigned int
13426 rs6000_elf_section_type_flags (decl, name, reloc)
13427 tree decl;
13428 const char *name;
13429 int reloc;
13430 {
13431 unsigned int flags
13432 = default_section_type_flags_1 (decl, name, reloc,
13433 flag_pic || DEFAULT_ABI == ABI_AIX);
13434
13435 if (TARGET_RELOCATABLE)
13436 flags |= SECTION_WRITE;
13437
13438 return flags;
13439 }
13440
13441 /* Record an element in the table of global constructors. SYMBOL is
13442 a SYMBOL_REF of the function to be called; PRIORITY is a number
13443 between 0 and MAX_INIT_PRIORITY.
13444
13445 This differs from default_named_section_asm_out_constructor in
13446 that we have special handling for -mrelocatable. */
13447
13448 static void
13449 rs6000_elf_asm_out_constructor (symbol, priority)
13450 rtx symbol;
13451 int priority;
13452 {
13453 const char *section = ".ctors";
13454 char buf[16];
13455
13456 if (priority != DEFAULT_INIT_PRIORITY)
13457 {
13458 sprintf (buf, ".ctors.%.5u",
13459 /* Invert the numbering so the linker puts us in the proper
13460 order; constructors are run from right to left, and the
13461 linker sorts in increasing order. */
13462 MAX_INIT_PRIORITY - priority);
13463 section = buf;
13464 }
13465
13466 named_section_flags (section, SECTION_WRITE);
13467 assemble_align (POINTER_SIZE);
13468
13469 if (TARGET_RELOCATABLE)
13470 {
13471 fputs ("\t.long (", asm_out_file);
13472 output_addr_const (asm_out_file, symbol);
13473 fputs (")@fixup\n", asm_out_file);
13474 }
13475 else
13476 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13477 }
13478
13479 static void
13480 rs6000_elf_asm_out_destructor (symbol, priority)
13481 rtx symbol;
13482 int priority;
13483 {
13484 const char *section = ".dtors";
13485 char buf[16];
13486
13487 if (priority != DEFAULT_INIT_PRIORITY)
13488 {
13489 sprintf (buf, ".dtors.%.5u",
13490 /* Invert the numbering so the linker puts us in the proper
13491 order; constructors are run from right to left, and the
13492 linker sorts in increasing order. */
13493 MAX_INIT_PRIORITY - priority);
13494 section = buf;
13495 }
13496
13497 named_section_flags (section, SECTION_WRITE);
13498 assemble_align (POINTER_SIZE);
13499
13500 if (TARGET_RELOCATABLE)
13501 {
13502 fputs ("\t.long (", asm_out_file);
13503 output_addr_const (asm_out_file, symbol);
13504 fputs (")@fixup\n", asm_out_file);
13505 }
13506 else
13507 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13508 }
13509 #endif
13510
13511 #if TARGET_XCOFF
13512 static void
13513 rs6000_xcoff_asm_globalize_label (stream, name)
13514 FILE *stream;
13515 const char *name;
13516 {
13517 fputs (GLOBAL_ASM_OP, stream);
13518 RS6000_OUTPUT_BASENAME (stream, name);
13519 putc ('\n', stream);
13520 }
13521
13522 static void
13523 rs6000_xcoff_asm_named_section (name, flags)
13524 const char *name;
13525 unsigned int flags;
13526 {
13527 int smclass;
13528 static const char * const suffix[3] = { "PR", "RO", "RW" };
13529
13530 if (flags & SECTION_CODE)
13531 smclass = 0;
13532 else if (flags & SECTION_WRITE)
13533 smclass = 2;
13534 else
13535 smclass = 1;
13536
13537 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
13538 (flags & SECTION_CODE) ? "." : "",
13539 name, suffix[smclass], flags & SECTION_ENTSIZE);
13540 }
13541
13542 static void
13543 rs6000_xcoff_select_section (decl, reloc, align)
13544 tree decl;
13545 int reloc;
13546 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13547 {
13548 if (decl_readonly_section_1 (decl, reloc, 1))
13549 {
13550 if (TREE_PUBLIC (decl))
13551 read_only_data_section ();
13552 else
13553 read_only_private_data_section ();
13554 }
13555 else
13556 {
13557 if (TREE_PUBLIC (decl))
13558 data_section ();
13559 else
13560 private_data_section ();
13561 }
13562 }
13563
static void
rs6000_xcoff_unique_section (decl, reloc)
     tree decl;
     int reloc ATTRIBUTE_UNUSED;
{
  const char *name;

  /* Use select_section for private and uninitialized data.  Note the
     NULL_TREE test must precede initializer_zerop, which cannot take
     a null initializer.  */
  if (!TREE_PUBLIC (decl)
      || DECL_COMMON (decl)
      || DECL_INITIAL (decl) == NULL_TREE
      || DECL_INITIAL (decl) == error_mark_node
      || (flag_zero_initialized_in_bss
	  && initializer_zerop (DECL_INITIAL (decl))))
    return;

  /* Name the unique section after the symbol itself, with any target
     encoding stripped off.  */
  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
  name = (*targetm.strip_name_encoding) (name);
  DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
}
13584
13585 /* Select section for constant in constant pool.
13586
13587 On RS/6000, all constants are in the private read-only data area.
13588 However, if this is being placed in the TOC it must be output as a
13589 toc entry. */
13590
13591 static void
13592 rs6000_xcoff_select_rtx_section (mode, x, align)
13593 enum machine_mode mode;
13594 rtx x;
13595 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
13596 {
13597 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13598 toc_section ();
13599 else
13600 read_only_private_data_section ();
13601 }
13602
13603 /* Remove any trailing [DS] or the like from the symbol name. */
13604
/* Remove any trailing [DS] or the like from the symbol name.  */

static const char *
rs6000_xcoff_strip_name_encoding (name)
     const char *name;
{
  size_t len;

  /* Skip the '*' user-label prefix, if any.  */
  if (*name == '*')
    name++;
  len = strlen (name);

  /* Strip a trailing four-character mapping-class suffix such as
     "[DS]".  The len >= 4 guard fixes two latent bugs: an empty NAME
     read name[-1], and a name shorter than 4 characters ending in ']'
     made "len - 4" wrap around (size_t underflow).  */
  if (len >= 4 && name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
13618
13619 /* Section attributes. AIX is always PIC. */
13620
static unsigned int
rs6000_xcoff_section_type_flags (decl, name, reloc)
     tree decl;
     const char *name;
     int reloc;
{
  unsigned int align;
  /* AIX is always PIC, hence shlib == 1.  */
  unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);

  /* Align to at least UNIT size.  */
  if (flags & SECTION_CODE)
    align = MIN_UNITS_PER_WORD;
  else
    /* Increase alignment of large objects if not already stricter.
       NOTE(review): DECL is dereferenced unconditionally here —
       confirm callers never reach this with a null DECL for a data
       section.  */
    align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
		 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
		 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);

  /* Store the alignment exponent in the SECTION_ENTSIZE bits; it is
     printed as the csect alignment by rs6000_xcoff_asm_named_section.  */
  return flags | (exact_log2 (align) & SECTION_ENTSIZE);
}
13641
13642 static void
13643 rs6000_xcoff_encode_section_info (decl, first)
13644 tree decl;
13645 int first ATTRIBUTE_UNUSED;
13646 {
13647 if (TREE_CODE (decl) == FUNCTION_DECL
13648 && (*targetm.binds_local_p) (decl))
13649 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
13650 }
13651 #endif /* TARGET_XCOFF */
13652
13653 #if TARGET_MACHO
/* Cross-module name binding.  Darwin does not support overriding
   functions at dynamic-link time.  */

static bool
rs6000_binds_local_p (decl)
     tree decl;
{
  /* Second argument 0: do not treat this as an overridable shared
     library symbol, since Darwin never overrides at dynamic link.  */
  return default_binds_local_p_1 (decl, 0);
}
13663 #endif
13664
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.

   Costs are expressed via COSTS_N_INSNS; the per-processor tables
   below reflect each CPU's latency for multiply and divide.  */

static bool
rs6000_rtx_costs (x, code, outer_code, total)
     rtx x;
     int code, outer_code ATTRIBUTE_UNUSED;
     int *total;
{
  switch (code)
    {
      /* On the RS/6000, if it is valid in the insn, it is free.
	 So this always returns 0.  */
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case HIGH:
      *total = 0;
      return true;

    case PLUS:
      /* An immediate add costs 2 insns when the constant fits neither
	 a signed 16-bit "addi" operand nor (low half zero) an "addis"
	 operand; otherwise a single instruction suffices.  */
      *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
		 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
					       + 0x8000) >= 0x10000)
		 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
		? COSTS_N_INSNS (2)
		: COSTS_N_INSNS (1));
      return true;

    case AND:
    case IOR:
    case XOR:
      /* Logical ops with a constant that has bits set in both the
	 upper and lower halves need two immediate instructions.  */
      *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
		 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
		 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
		? COSTS_N_INSNS (2)
		: COSTS_N_INSNS (1));
      return true;

    case MULT:
      if (optimize_size)
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      /* Multiply latency varies by processor, and on several CPUs is
	 lower when the second operand is a small (8-bit) constant.  */
      switch (rs6000_cpu)
	{
	case PROCESSOR_RIOS1:
	case PROCESSOR_PPC405:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (5)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
	  return true;

	case PROCESSOR_RS64A:
	  /* RS64A: 64-bit multiplies are markedly slower than 32-bit.  */
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
	  return true;

	case PROCESSOR_RIOS2:
	case PROCESSOR_MPCCORE:
	case PROCESSOR_PPC604e:
	  *total = COSTS_N_INSNS (2);
	  return true;

	case PROCESSOR_PPC601:
	  *total = COSTS_N_INSNS (5);
	  return true;

	case PROCESSOR_PPC603:
	case PROCESSOR_PPC7400:
	case PROCESSOR_PPC750:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (5)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
	  return true;

	case PROCESSOR_PPC7450:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? COSTS_N_INSNS (4)
		    : COSTS_N_INSNS (3));
	  return true;

	case PROCESSOR_PPC403:
	case PROCESSOR_PPC604:
	case PROCESSOR_PPC8540:
	  *total = COSTS_N_INSNS (4);
	  return true;

	case PROCESSOR_PPC620:
	case PROCESSOR_PPC630:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
		    : (INTVAL (XEXP (x, 1)) >= -256
		       && INTVAL (XEXP (x, 1)) <= 255)
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
	  return true;

	case PROCESSOR_POWER4:
	  *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
		    ? GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
		    : COSTS_N_INSNS (2));
	  return true;

	default:
	  abort ();
	}

    case DIV:
    case MOD:
      /* Signed division by an exact power of two becomes a cheap
	 shift-based sequence.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	{
	  *total = COSTS_N_INSNS (2);
	  return true;
	}
      /* FALLTHRU */

    case UDIV:
    case UMOD:
      /* General divide latency by processor.  */
      switch (rs6000_cpu)
	{
	case PROCESSOR_RIOS1:
	  *total = COSTS_N_INSNS (19);
	  return true;

	case PROCESSOR_RIOS2:
	  *total = COSTS_N_INSNS (13);
	  return true;

	case PROCESSOR_RS64A:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (65)
		    : COSTS_N_INSNS (67));
	  return true;

	case PROCESSOR_MPCCORE:
	  *total = COSTS_N_INSNS (6);
	  return true;

	case PROCESSOR_PPC403:
	  *total = COSTS_N_INSNS (33);
	  return true;

	case PROCESSOR_PPC405:
	  *total = COSTS_N_INSNS (35);
	  return true;

	case PROCESSOR_PPC601:
	  *total = COSTS_N_INSNS (36);
	  return true;

	case PROCESSOR_PPC603:
	  *total = COSTS_N_INSNS (37);
	  return true;

	case PROCESSOR_PPC604:
	case PROCESSOR_PPC604e:
	  *total = COSTS_N_INSNS (20);
	  return true;

	case PROCESSOR_PPC620:
	case PROCESSOR_PPC630:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (21)
		    : COSTS_N_INSNS (37));
	  return true;

	case PROCESSOR_PPC750:
	case PROCESSOR_PPC8540:
	case PROCESSOR_PPC7400:
	  *total = COSTS_N_INSNS (19);
	  return true;

	case PROCESSOR_PPC7450:
	  *total = COSTS_N_INSNS (23);
	  return true;

	case PROCESSOR_POWER4:
	  *total = (GET_MODE (XEXP (x, 1)) != DImode
		    ? COSTS_N_INSNS (18)
		    : COSTS_N_INSNS (34));
	  return true;

	default:
	  abort ();
	}

    case FFS:
      *total = COSTS_N_INSNS (4);
      return true;

    case MEM:
      /* MEM should be slightly more expensive than (plus (reg) (const)) */
      *total = 5;
      return true;

    default:
      /* Not handled here: let the caller scan subexpressions.  */
      return false;
    }
}
13879
13880 /* A C expression returning the cost of moving data from a register of class
13881 CLASS1 to one of CLASS2. */
13882
13883 int
13884 rs6000_register_move_cost (mode, from, to)
13885 enum machine_mode mode;
13886 enum reg_class from, to;
13887 {
13888 /* Moves from/to GENERAL_REGS. */
13889 if (reg_classes_intersect_p (to, GENERAL_REGS)
13890 || reg_classes_intersect_p (from, GENERAL_REGS))
13891 {
13892 if (! reg_classes_intersect_p (to, GENERAL_REGS))
13893 from = to;
13894
13895 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
13896 return (rs6000_memory_move_cost (mode, from, 0)
13897 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
13898
13899 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
13900 else if (from == CR_REGS)
13901 return 4;
13902
13903 else
13904 /* A move will cost one instruction per GPR moved. */
13905 return 2 * HARD_REGNO_NREGS (0, mode);
13906 }
13907
13908 /* Moving between two similar registers is just one instruction. */
13909 else if (reg_classes_intersect_p (to, from))
13910 return mode == TFmode ? 4 : 2;
13911
13912 /* Everything else has to go through GENERAL_REGS. */
13913 else
13914 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
13915 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
13916 }
13917
13918 /* A C expressions returning the cost of moving data of MODE from a register to
13919 or from memory. */
13920
13921 int
13922 rs6000_memory_move_cost (mode, class, in)
13923 enum machine_mode mode;
13924 enum reg_class class;
13925 int in ATTRIBUTE_UNUSED;
13926 {
13927 if (reg_classes_intersect_p (class, GENERAL_REGS))
13928 return 4 * HARD_REGNO_NREGS (0, mode);
13929 else if (reg_classes_intersect_p (class, FLOAT_REGS))
13930 return 4 * HARD_REGNO_NREGS (32, mode);
13931 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
13932 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
13933 else
13934 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
13935 }
13936
13937 /* Return true if TYPE is of type __ev64_opaque__. */
13938
13939 static bool
13940 is_ev64_opaque_type (type)
13941 tree type;
13942 {
13943 return (TARGET_SPE
13944 && TREE_CODE (type) == VECTOR_TYPE
13945 && TYPE_NAME (type)
13946 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
13947 && DECL_NAME (TYPE_NAME (type))
13948 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
13949 "__ev64_opaque__") == 0);
13950 }
13951
13952 static rtx
13953 rs6000_dwarf_register_span (reg)
13954 rtx reg;
13955 {
13956 unsigned regno;
13957
13958 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
13959 return NULL_RTX;
13960
13961 regno = REGNO (reg);
13962
13963 /* The duality of the SPE register size wreaks all kinds of havoc.
13964 This is a way of distinguishing r0 in 32-bits from r0 in
13965 64-bits. */
13966 return
13967 gen_rtx_PARALLEL (VOIDmode,
13968 BYTES_BIG_ENDIAN
13969 ? gen_rtvec (2,
13970 gen_rtx_REG (SImode, regno + 1200),
13971 gen_rtx_REG (SImode, regno))
13972 : gen_rtvec (2,
13973 gen_rtx_REG (SImode, regno),
13974 gen_rtx_REG (SImode, regno + 1200)));
13975 }
13976
13977 #include "gt-rs6000.h"