c1d891118258b658f2808cd5f43f363ddcf51fd7
[gcc.git] / gcc / config / rs6000 / rs6000.c
1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
5
6 This file is part of GNU CC.
7
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "regs.h"
27 #include "hard-reg-set.h"
28 #include "real.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "flags.h"
33 #include "recog.h"
34 #include "obstack.h"
35 #include "tree.h"
36 #include "expr.h"
37 #include "optabs.h"
38 #include "except.h"
39 #include "function.h"
40 #include "output.h"
41 #include "basic-block.h"
42 #include "integrate.h"
43 #include "toplev.h"
44 #include "ggc.h"
45 #include "hashtab.h"
46 #include "tm_p.h"
47 #include "target.h"
48 #include "target-def.h"
49 #include "langhooks.h"
50 #include "reload.h"
51
52 #ifndef TARGET_NO_PROTOTYPE
53 #define TARGET_NO_PROTOTYPE 0
54 #endif
55
56 #define min(A,B) ((A) < (B) ? (A) : (B))
57 #define max(A,B) ((A) > (B) ? (A) : (B))
58
59 /* Target cpu type */
60
/* Scheduling/tuning model selected for code generation.  */
enum processor_type rs6000_cpu;

/* The three sources of a CPU selection, in increasing priority order:
   the configure-time default, -mcpu= (sets both arch and tune), and
   -mtune= (tune only).  rs6000_override_options walks this table.  */
struct rs6000_cpu_select rs6000_select[3] =
{
  /* switch		name,			tune	arch */
  { (const char *)0,	"--with-cpu=",		1,	1 },
  { (const char *)0,	"-mcpu=",		1,	1 },
  { (const char *)0,	"-mtune=",		1,	0 },
};
69
70 /* Size of long double */
71 const char *rs6000_long_double_size_string;
72 int rs6000_long_double_type_size;
73
74 /* Whether -mabi=altivec has appeared */
75 int rs6000_altivec_abi;
76
77 /* Set to non-zero once AIX common-mode calls have been defined. */
78 static int common_mode_defined;
79
80 /* Save information from a "cmpxx" operation until the branch or scc is
81 emitted. */
82 rtx rs6000_compare_op0, rs6000_compare_op1;
83 int rs6000_compare_fp_p;
84
85 /* Label number of label created for -mrelocatable, to call to so we can
86 get the address of the GOT section */
87 int rs6000_pic_labelno;
88
89 #ifdef USING_ELFOS_H
90 /* Which abi to adhere to */
91 const char *rs6000_abi_name = RS6000_ABI_NAME;
92
93 /* Semantics of the small data area */
94 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
95
96 /* Which small data model to use */
97 const char *rs6000_sdata_name = (char *)0;
98
99 /* Counter for labels which are to be placed in .fixup. */
100 int fixuplabelno = 0;
101 #endif
102
103 /* ABI enumeration available for subtarget to use. */
104 enum rs6000_abi rs6000_current_abi;
105
106 /* ABI string from -mabi= option. */
107 const char *rs6000_abi_string;
108
109 /* Debug flags */
110 const char *rs6000_debug_name;
111 int rs6000_debug_stack; /* debug stack applications */
112 int rs6000_debug_arg; /* debug argument handling */
113
114 /* Flag to say the TOC is initialized */
115 int toc_initialized;
116 char toc_label_name[10];
117
118 /* Alias set for saves and restores from the rs6000 stack. */
119 static int rs6000_sr_alias_set;
120
121 static void rs6000_add_gc_roots PARAMS ((void));
122 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
123 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
124 static void validate_condition_mode
125 PARAMS ((enum rtx_code, enum machine_mode));
126 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
127 static void rs6000_maybe_dead PARAMS ((rtx));
128 static void rs6000_emit_stack_tie PARAMS ((void));
129 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
130 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
131 static unsigned rs6000_hash_constant PARAMS ((rtx));
132 static unsigned toc_hash_function PARAMS ((const void *));
133 static int toc_hash_eq PARAMS ((const void *, const void *));
134 static int toc_hash_mark_entry PARAMS ((void **, void *));
135 static void toc_hash_mark_table PARAMS ((void *));
136 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
137 static void rs6000_free_machine_status PARAMS ((struct function *));
138 static void rs6000_init_machine_status PARAMS ((struct function *));
139 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
140 static int rs6000_ra_ever_killed PARAMS ((void));
141 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
142 const struct attribute_spec rs6000_attribute_table[];
143 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
144 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
145 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
146 HOST_WIDE_INT, HOST_WIDE_INT));
147 #if TARGET_ELF
148 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
149 int));
150 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
151 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
152 #endif
153 #ifdef OBJECT_FORMAT_COFF
154 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
155 #endif
156 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
157 static int rs6000_adjust_priority PARAMS ((rtx, int));
158 static int rs6000_issue_rate PARAMS ((void));
159
160 static void rs6000_init_builtins PARAMS ((void));
161 static void altivec_init_builtins PARAMS ((void));
162 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
163 static rtx altivec_expand_builtin PARAMS ((tree, rtx));
164 static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
165 static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
166 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
167 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
168 static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
169 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
170 static void rs6000_parse_abi_options PARAMS ((void));
171 static int first_altivec_reg_to_save PARAMS ((void));
172 static unsigned int compute_vrsave_mask PARAMS ((void));
173 static void is_altivec_return_reg PARAMS ((rtx, void *));
174 int vrsave_operation PARAMS ((rtx, enum machine_mode));
175 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
176 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
177 static int easy_vector_constant PARAMS ((rtx));
178 \f
/* Default register names, indexed by hard register number.  Mutable
   (not const) because rs6000_override_options overwrites it with
   alt_reg_names when -mregnames is given.  */
char rs6000_reg_names[][8] =
{
  /* GPRs 0-31.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
  /* FPRs 0-31.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
  /* Special registers: MQ, link, count, arg pointer.  */
     "mq", "lr", "ctr","ap",
  /* Condition-register fields CR0-CR7.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
  "xer",
  /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave"
};
200
#ifdef TARGET_REGNAMES
/* Alternate (symbolic) register names, used when -mregnames is given;
   copied over rs6000_reg_names in rs6000_override_options.  Layout
   parallels rs6000_reg_names exactly.  */
static const char alt_reg_names[][8] =
{
  /* GPRs 0-31.  */
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
  /* FPRs 0-31.  */
   "%f0",  "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",  "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
  /* Special registers.  */
    "mq",    "lr",  "ctr",   "ap",
  /* Condition-register fields.  */
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
  "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6",  "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "%vrsave"
};
#endif
223 \f
224 #ifndef MASK_STRICT_ALIGN
225 #define MASK_STRICT_ALIGN 0
226 #endif
227 \f
228 /* Initialize the GCC target structure. */
229 #undef TARGET_ATTRIBUTE_TABLE
230 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
231
232 #undef TARGET_ASM_ALIGNED_DI_OP
233 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
234
235 /* Default unaligned ops are only provided for ELF. Find the ops needed
236 for non-ELF systems. */
237 #ifndef OBJECT_FORMAT_ELF
238 #ifdef OBJECT_FORMAT_COFF
239 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
240 64-bit targets. */
241 #undef TARGET_ASM_UNALIGNED_HI_OP
242 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
243 #undef TARGET_ASM_UNALIGNED_SI_OP
244 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
245 #undef TARGET_ASM_UNALIGNED_DI_OP
246 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
247 #else
248 /* For Darwin. */
249 #undef TARGET_ASM_UNALIGNED_HI_OP
250 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
251 #undef TARGET_ASM_UNALIGNED_SI_OP
252 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
253 #endif
254 #endif
255
256 /* This hook deals with fixups for relocatable code and DI-mode objects
257 in 64-bit code. */
258 #undef TARGET_ASM_INTEGER
259 #define TARGET_ASM_INTEGER rs6000_assemble_integer
260
261 #undef TARGET_ASM_FUNCTION_PROLOGUE
262 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
263 #undef TARGET_ASM_FUNCTION_EPILOGUE
264 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
265
266 #if TARGET_ELF
267 #undef TARGET_SECTION_TYPE_FLAGS
268 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
269 #endif
270
271 #undef TARGET_SCHED_ISSUE_RATE
272 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
273 #undef TARGET_SCHED_ADJUST_COST
274 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
275 #undef TARGET_SCHED_ADJUST_PRIORITY
276 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
277
278 #undef TARGET_INIT_BUILTINS
279 #define TARGET_INIT_BUILTINS rs6000_init_builtins
280
281 #undef TARGET_EXPAND_BUILTIN
282 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
283
284 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
285 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
286
287 struct gcc_target targetm = TARGET_INITIALIZER;
288 \f
/* Override command line options.  Mostly we process the processor
   type and sometimes adjust other TARGET_ options.

   DEFAULT_CPU is the configure-time CPU name (may be NULL).  Order of
   operations below matters: later adjustments (explicit -mmultiple /
   -mstring, endianness restrictions, ABI struct-return default) must
   run after the per-CPU flag table has been applied.  */

void
rs6000_override_options (default_cpu)
     const char *default_cpu;
{
  size_t i, j;
  struct rs6000_cpu_select *ptr;

  /* Simplify the entries below by making a mask for any POWER
     variant and any PowerPC variant.  */

#define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
#define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
		       | MASK_PPC_GFXOPT | MASK_POWERPC64)
#define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)

  /* Per-CPU table: canonical -mcpu= name, scheduling enum, target
     flags forced on, and target flags forced off for that CPU.  */
  static struct ptt
    {
      const char *const name;		/* Canonical processor name.  */
      const enum processor_type processor; /* Processor type enum value.  */
      const int target_enable;		/* Target flags to enable.  */
      const int target_disable;		/* Target flags to disable.  */
    } const processor_target_table[]
      = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_MASKS},
	 {"power", PROCESSOR_POWER,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power2", PROCESSOR_POWER,
	    MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"power3", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"powerpc", PROCESSOR_POWERPC,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"powerpc64", PROCESSOR_POWERPC64,
	    MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"rios", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios1", PROCESSOR_RIOS1,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rsc1", PROCESSOR_PPC601,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rios2", PROCESSOR_RIOS2,
	    MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
	    POWERPC_MASKS | MASK_NEW_MNEMONICS},
	 {"rs64a", PROCESSOR_RS64A,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS},
	 {"401", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"403", PROCESSOR_PPC403,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"405", PROCESSOR_PPC405,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"505", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"601", PROCESSOR_PPC601,
	    MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
	    MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"602", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"ec603e", PROCESSOR_PPC603,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"604", PROCESSOR_PPC604,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"604e", PROCESSOR_PPC604e,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"620", PROCESSOR_PPC620,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"630", PROCESSOR_PPC630,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT},
	 {"740", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"750", PROCESSOR_PPC750,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7400", PROCESSOR_PPC7400,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"7450", PROCESSOR_PPC7450,
	    MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
	 {"801", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"821", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"823", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
	 {"860", PROCESSOR_MPCCORE,
	    MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
	    POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};

  size_t ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);

  /* Save current -mmultiple/-mno-multiple status.  */
  int multiple = TARGET_MULTIPLE;
  /* Save current -mstring/-mno-string status.  */
  int string = TARGET_STRING;

  /* Identify the processor type.  */
  rs6000_select[0].string = default_cpu;
  rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;

  /* Apply each selection source in priority order (default, -mcpu=,
     -mtune=); later entries overwrite earlier ones.  */
  for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
    {
      ptr = &rs6000_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	{
	  for (j = 0; j < ptt_size; j++)
	    if (! strcmp (ptr->string, processor_target_table[j].name))
	      {
		if (ptr->set_tune_p)
		  rs6000_cpu = processor_target_table[j].processor;

		if (ptr->set_arch_p)
		  {
		    target_flags |= processor_target_table[j].target_enable;
		    target_flags &= ~processor_target_table[j].target_disable;
		  }
		break;
	      }

	  /* The inner loop ran off the end without a match.  */
	  if (j == ptt_size)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  /* If we are optimizing big endian systems for space, use the store
     multiple instructions.  */
  if (BYTES_BIG_ENDIAN && optimize_size)
    target_flags |= MASK_MULTIPLE;

  /* If -mmultiple or -mno-multiple was explicitly used, don't
     override with the processor default */
  if (TARGET_MULTIPLE_SET)
    target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;

  /* If -mstring or -mno-string was explicitly used, don't override
     with the processor default.  */
  if (TARGET_STRING_SET)
    target_flags = (target_flags & ~MASK_STRING) | string;

  /* Don't allow -mmultiple or -mstring on little endian systems
     unless the cpu is a 750, because the hardware doesn't support the
     instructions used in little endian mode, and causes an alignment
     trap.  The 750 does not cause an alignment trap (except when the
     target is unaligned).  */

  if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
    {
      if (TARGET_MULTIPLE)
	{
	  target_flags &= ~MASK_MULTIPLE;
	  /* Only warn if the user asked for it explicitly.  */
	  if (TARGET_MULTIPLE_SET)
	    warning ("-mmultiple is not supported on little endian systems");
	}

      if (TARGET_STRING)
	{
	  target_flags &= ~MASK_STRING;
	  if (TARGET_STRING_SET)
	    warning ("-mstring is not supported on little endian systems");
	}
    }

  if (flag_pic && DEFAULT_ABI == ABI_AIX)
    {
      warning ("-f%s ignored (all code is position independent)",
	       (flag_pic > 1) ? "PIC" : "pic");
      flag_pic = 0;
    }

#ifdef XCOFF_DEBUGGING_INFO
  if (flag_function_sections && (write_symbols != NO_DEBUG)
      && DEFAULT_ABI == ABI_AIX)
    {
      warning ("-ffunction-sections disabled on AIX when debugging");
      flag_function_sections = 0;
    }

  if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
    {
      warning ("-fdata-sections not supported on AIX");
      flag_data_sections = 0;
    }
#endif

  /* Set debug flags */
  if (rs6000_debug_name)
    {
      if (! strcmp (rs6000_debug_name, "all"))
	rs6000_debug_stack = rs6000_debug_arg = 1;
      else if (! strcmp (rs6000_debug_name, "stack"))
	rs6000_debug_stack = 1;
      else if (! strcmp (rs6000_debug_name, "arg"))
	rs6000_debug_arg = 1;
      else
	error ("unknown -mdebug-%s switch", rs6000_debug_name);
    }

  /* Set size of long double */
  rs6000_long_double_type_size = 64;
  if (rs6000_long_double_size_string)
    {
      char *tail;
      int size = strtol (rs6000_long_double_size_string, &tail, 10);
      /* Only 64 and 128 are valid widths; reject trailing garbage.  */
      if (*tail != '\0' || (size != 64 && size != 128))
	error ("Unknown switch -mlong-double-%s",
	       rs6000_long_double_size_string);
      else
	rs6000_long_double_type_size = size;
    }

  /* Handle -mabi= options.  */
  rs6000_parse_abi_options ();

#ifdef TARGET_REGNAMES
  /* If the user desires alternate register names, copy in the
     alternate names now.  */
  if (TARGET_REGNAMES)
    memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
#endif

#ifdef SUBTARGET_OVERRIDE_OPTIONS
  SUBTARGET_OVERRIDE_OPTIONS;
#endif
#ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
  SUBSUBTARGET_OVERRIDE_OPTIONS;
#endif

  /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
     If -maix-struct-return or -msvr4-struct-return was explicitly
     used, don't override with the ABI default.  */
  if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
    {
      if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
	target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
      else
	target_flags |= MASK_AIX_STRUCT_RET;
    }

  /* Register global variables with the garbage collector.  */
  rs6000_add_gc_roots ();

  /* Allocate an alias set for register saves & restores from stack.  */
  rs6000_sr_alias_set = new_alias_set ();

  if (TARGET_TOC)
    ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);

  /* We can only guarantee the availability of DI pseudo-ops when
     assembling for 64-bit targets.  */
  if (!TARGET_64BIT)
    {
      targetm.asm_out.aligned_op.di = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  /* Arrange to save and restore machine status around nested functions.  */
  init_machine_status = rs6000_init_machine_status;
  free_machine_status = rs6000_free_machine_status;
}
583
584 /* Handle -mabi= options. */
585 static void
586 rs6000_parse_abi_options ()
587 {
588 if (rs6000_abi_string == 0)
589 return;
590 else if (! strcmp (rs6000_abi_string, "altivec"))
591 rs6000_altivec_abi = 1;
592 else
593 error ("unknown ABI specified: '%s'", rs6000_abi_string);
594 }
595
/* Implement the OPTIMIZATION_OPTIONS target hook.  Intentionally a
   no-op: rs6000 sets no optimization-level-dependent defaults here.
   NOTE(review): appears to exist only because the target macro must
   resolve to something — confirm before removing.  */
void
optimization_options (level, size)
     int level ATTRIBUTE_UNUSED;
     int size ATTRIBUTE_UNUSED;
{
}
602 \f
/* Do anything needed at the start of the asm file.  With
   -fverbose-asm, emit an assembler comment listing the cpu/tune
   selections and (for ELF) the small-data settings in effect.  */

void
rs6000_file_start (file, default_cpu)
     FILE *file;
     const char *default_cpu;
{
  size_t i;
  char buffer[80];
  /* START points at the comment-header text for the first item printed
     and is reset to "" afterwards, so the header appears only once.  */
  const char *start = buffer;
  struct rs6000_cpu_select *ptr;

  if (flag_verbose_asm)
    {
      sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
      rs6000_select[0].string = default_cpu;

      for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
	{
	  ptr = &rs6000_select[i];
	  if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	    {
	      fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
	      start = "";
	    }
	}

#ifdef USING_ELFOS_H
      switch (rs6000_sdata)
	{
	case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
	case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
	case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
	case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
	}

      if (rs6000_sdata && g_switch_value)
	{
	  fprintf (file, "%s -G %d", start, g_switch_value);
	  start = "";
	}
#endif

      /* START was cleared iff at least one item was printed; terminate
	 the comment line in that case.  */
      if (*start == '\0')
	putc ('\n', file);
    }
}
650
651 \f
/* Create a CONST_DOUBLE from a string.  STRING is a decimal floating
   constant; MODE selects the target floating format to parse it in.  */

struct rtx_def *
rs6000_float_const (string, mode)
     const char *string;
     enum machine_mode mode;
{
  REAL_VALUE_TYPE value;
  value = REAL_VALUE_ATOF (string, mode);
  return immed_real_const_1 (value, mode);
}
663 \f
664 /* Return non-zero if this function is known to have a null epilogue. */
665
666 int
667 direct_return ()
668 {
669 if (reload_completed)
670 {
671 rs6000_stack_t *info = rs6000_stack_info ();
672
673 if (info->first_gp_reg_save == 32
674 && info->first_fp_reg_save == 64
675 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
676 && ! info->lr_save_p
677 && ! info->cr_save_p
678 && info->vrsave_mask == 0
679 && ! info->push_p)
680 return 1;
681 }
682
683 return 0;
684 }
685
/* Predicate that accepts any operand whatsoever; returns 1 always.
   Used in machine-description patterns that place no constraint on an
   operand.  */

int
any_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return 1;
}
695
696 /* Returns 1 if op is the count register. */
697 int
698 count_register_operand (op, mode)
699 rtx op;
700 enum machine_mode mode ATTRIBUTE_UNUSED;
701 {
702 if (GET_CODE (op) != REG)
703 return 0;
704
705 if (REGNO (op) == COUNT_REGISTER_REGNUM)
706 return 1;
707
708 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
709 return 1;
710
711 return 0;
712 }
713
714 /* Returns 1 if op is an altivec register. */
715 int
716 altivec_register_operand (op, mode)
717 rtx op;
718 enum machine_mode mode ATTRIBUTE_UNUSED;
719 {
720
721 return (register_operand (op, mode)
722 && (GET_CODE (op) != REG
723 || REGNO (op) > FIRST_PSEUDO_REGISTER
724 || ALTIVEC_REGNO_P (REGNO (op))));
725 }
726
727 int
728 xer_operand (op, mode)
729 rtx op;
730 enum machine_mode mode ATTRIBUTE_UNUSED;
731 {
732 if (GET_CODE (op) != REG)
733 return 0;
734
735 if (XER_REGNO_P (REGNO (op)))
736 return 1;
737
738 return 0;
739 }
740
741 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
742 by such constants completes more quickly. */
743
744 int
745 s8bit_cint_operand (op, mode)
746 rtx op;
747 enum machine_mode mode ATTRIBUTE_UNUSED;
748 {
749 return ( GET_CODE (op) == CONST_INT
750 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
751 }
752
753 /* Return 1 if OP is a constant that can fit in a D field. */
754
755 int
756 short_cint_operand (op, mode)
757 rtx op;
758 enum machine_mode mode ATTRIBUTE_UNUSED;
759 {
760 return (GET_CODE (op) == CONST_INT
761 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
762 }
763
764 /* Similar for an unsigned D field. */
765
766 int
767 u_short_cint_operand (op, mode)
768 rtx op;
769 enum machine_mode mode ATTRIBUTE_UNUSED;
770 {
771 return (GET_CODE (op) == CONST_INT
772 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
773 }
774
775 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
776
777 int
778 non_short_cint_operand (op, mode)
779 rtx op;
780 enum machine_mode mode ATTRIBUTE_UNUSED;
781 {
782 return (GET_CODE (op) == CONST_INT
783 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
784 }
785
786 /* Returns 1 if OP is a CONST_INT that is a positive value
787 and an exact power of 2. */
788
789 int
790 exact_log2_cint_operand (op, mode)
791 rtx op;
792 enum machine_mode mode ATTRIBUTE_UNUSED;
793 {
794 return (GET_CODE (op) == CONST_INT
795 && INTVAL (op) > 0
796 && exact_log2 (INTVAL (op)) >= 0);
797 }
798
799 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
800 ctr, or lr). */
801
802 int
803 gpc_reg_operand (op, mode)
804 rtx op;
805 enum machine_mode mode;
806 {
807 return (register_operand (op, mode)
808 && (GET_CODE (op) != REG
809 || (REGNO (op) >= ARG_POINTER_REGNUM
810 && !XER_REGNO_P (REGNO (op)))
811 || REGNO (op) < MQ_REGNO));
812 }
813
814 /* Returns 1 if OP is either a pseudo-register or a register denoting a
815 CR field. */
816
817 int
818 cc_reg_operand (op, mode)
819 rtx op;
820 enum machine_mode mode;
821 {
822 return (register_operand (op, mode)
823 && (GET_CODE (op) != REG
824 || REGNO (op) >= FIRST_PSEUDO_REGISTER
825 || CR_REGNO_P (REGNO (op))));
826 }
827
828 /* Returns 1 if OP is either a pseudo-register or a register denoting a
829 CR field that isn't CR0. */
830
831 int
832 cc_reg_not_cr0_operand (op, mode)
833 rtx op;
834 enum machine_mode mode;
835 {
836 return (register_operand (op, mode)
837 && (GET_CODE (op) != REG
838 || REGNO (op) >= FIRST_PSEUDO_REGISTER
839 || CR_REGNO_NOT_CR0_P (REGNO (op))));
840 }
841
842 /* Returns 1 if OP is either a constant integer valid for a D-field or
843 a non-special register. If a register, it must be in the proper
844 mode unless MODE is VOIDmode. */
845
846 int
847 reg_or_short_operand (op, mode)
848 rtx op;
849 enum machine_mode mode;
850 {
851 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
852 }
853
854 /* Similar, except check if the negation of the constant would be
855 valid for a D-field. */
856
857 int
858 reg_or_neg_short_operand (op, mode)
859 rtx op;
860 enum machine_mode mode;
861 {
862 if (GET_CODE (op) == CONST_INT)
863 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
864
865 return gpc_reg_operand (op, mode);
866 }
867
868 /* Returns 1 if OP is either a constant integer valid for a DS-field or
869 a non-special register. If a register, it must be in the proper
870 mode unless MODE is VOIDmode. */
871
872 int
873 reg_or_aligned_short_operand (op, mode)
874 rtx op;
875 enum machine_mode mode;
876 {
877 if (gpc_reg_operand (op, mode))
878 return 1;
879 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
880 return 1;
881
882 return 0;
883 }
884
885
886 /* Return 1 if the operand is either a register or an integer whose
887 high-order 16 bits are zero. */
888
889 int
890 reg_or_u_short_operand (op, mode)
891 rtx op;
892 enum machine_mode mode;
893 {
894 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
895 }
896
897 /* Return 1 is the operand is either a non-special register or ANY
898 constant integer. */
899
900 int
901 reg_or_cint_operand (op, mode)
902 rtx op;
903 enum machine_mode mode;
904 {
905 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
906 }
907
/* Return 1 if the operand is either a non-special register or ANY
   32-bit signed constant integer.  When HOST_WIDE_INT is only 32 bits
   every CONST_INT trivially fits, so the range check is compiled out.  */

int
reg_or_arith_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
#if HOST_BITS_PER_WIDE_INT != 32
	      /* Bias by 2^31 so the signed range becomes one unsigned
		 comparison against 2^32.  */
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
		  < (unsigned HOST_WIDE_INT) 0x100000000ll)
#endif
	      ));
}
924
/* Return 1 if the operand is either a non-special register or a
   32-bit signed constant integer valid for 64-bit addition, i.e. one
   that can be split into a 16-bit addi plus a 16-bit addis without
   the high part overflowing.  */

int
reg_or_add_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      /* Upper bound keeps the addis half from wrapping.  */
	      && INTVAL (op) < 0x7fff8000
#if HOST_BITS_PER_WIDE_INT != 32
	      && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
942
/* Return 1 if the operand is either a non-special register or a
   32-bit signed constant integer valid for 64-bit subtraction; the
   mirror of reg_or_add_cint64_operand with the value negated.  */

int
reg_or_sub_cint64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (gpc_reg_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      && (- INTVAL (op)) < 0x7fff8000
#if HOST_BITS_PER_WIDE_INT != 32
	      && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
		  < 0x100000000ll)
#endif
	      ));
}
960
/* Return 1 if the operand is either a non-special register or ANY
   32-bit unsigned constant integer (i.e. a value usable with the
   32-bit logical instructions).  Aborts on mode/width combinations
   the callers are never supposed to pass.  */

int
reg_or_logical_cint_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	{
	  /* A CONST_INT wider than the host word only happens for
	     64-bit modes on a 32-bit host.  */
	  if (GET_MODE_BITSIZE (mode) <= 32)
	    abort ();

	  /* A negative CONST_INT stands for a sign-extended value,
	     whose upper half is all ones — not a 32-bit unsigned.  */
	  if (INTVAL (op) < 0)
	    return 0;
	}

      /* Accept iff no bits above the low 32 survive MODE's mask.  */
      return ((INTVAL (op) & GET_MODE_MASK (mode)
	       & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      /* Integral CONST_DOUBLEs only arise for DImode values that do
	 not fit in a host wide int.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  || mode != DImode)
	abort ();

      return CONST_DOUBLE_HIGH (op) == 0;
    }
  else
    return gpc_reg_operand (op, mode);
}
994
995 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
996
997 int
998 got_operand (op, mode)
999 rtx op;
1000 enum machine_mode mode ATTRIBUTE_UNUSED;
1001 {
1002 return (GET_CODE (op) == SYMBOL_REF
1003 || GET_CODE (op) == CONST
1004 || GET_CODE (op) == LABEL_REF);
1005 }
1006
1007 /* Return 1 if the operand is a simple references that can be loaded via
1008 the GOT (labels involving addition aren't allowed). */
1009
1010 int
1011 got_no_const_operand (op, mode)
1012 rtx op;
1013 enum machine_mode mode ATTRIBUTE_UNUSED;
1014 {
1015 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1016 }
1017
/* Return the number of instructions it takes to form the constant
   VALUE in an integer register.  */

static int
num_insns_constant_wide (value)
     HOST_WIDE_INT value;
{
  /* signed constant loadable with {cal|addi} */
  if (CONST_OK_FOR_LETTER_P (value, 'I'))
    return 1;

  /* constant loadable with {cau|addis} */
  else if (CONST_OK_FOR_LETTER_P (value, 'L'))
    return 1;

#if HOST_BITS_PER_WIDE_INT == 64
  else if (TARGET_POWERPC64)
    {
      HOST_WIDE_INT low = value & 0xffffffff;
      HOST_WIDE_INT high = value >> 32;

      low = (low ^ 0x80000000) - 0x80000000;	/* sign extend */

      /* If the sign-extended low word reproduces the whole value
	 (HIGH is pure sign extension of LOW), two insns suffice.  */
      if (high == 0 && (low & 0x80000000) == 0)
	return 2;

      else if (high == -1 && (low & 0x80000000) != 0)
	return 2;

      /* Zero low half: build HIGH, then one shift.  */
      else if (! low)
	return num_insns_constant_wide (high) + 1;

      /* General case: build both halves plus one combining insn.  */
      else
	return (num_insns_constant_wide (high)
		+ num_insns_constant_wide (low) + 1);
    }
#endif

  /* 32-bit value needing both halves: addis + ori, two insns.  */
  else
    return 2;
}
1059
/* Return the number of instructions it takes to form the constant OP
   (a CONST_INT or CONST_DOUBLE) of mode MODE in an integer register.
   Aborts on any other rtx code.  */

int
num_insns_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
#if HOST_BITS_PER_WIDE_INT == 64
      /* A value that does not fit in 32 signed bits but matches the
	 rldic-style 64-bit mask patterns can be built with li + rldic,
	 i.e. two instructions.  */
      if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
	  && mask64_operand (op, mode))
	return 2;
      else
#endif
	return num_insns_constant_wide (INTVAL (op));
    }

  else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      /* Cost the 32-bit target image of the float, since that is the
	 bit pattern actually loaded into the register.  */
      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);
      return num_insns_constant_wide ((HOST_WIDE_INT) l);
    }

  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      HOST_WIDE_INT low;
      HOST_WIDE_INT high;
      long l[2];
      REAL_VALUE_TYPE rv;
      int endian = (WORDS_BIG_ENDIAN == 0);

      /* VOIDmode/DImode CONST_DOUBLEs carry a wide integer directly;
	 anything else is a floating constant converted to its target
	 double image.  */
      if (mode == VOIDmode || mode == DImode)
	{
	  high = CONST_DOUBLE_HIGH (op);
	  low = CONST_DOUBLE_LOW (op);
	}
      else
	{
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
	  high = l[endian];
	  low = l[1 - endian];
	}

      /* In 32-bit mode the two words are loaded independently.  */
      if (TARGET_32BIT)
	return (num_insns_constant_wide (low)
		+ num_insns_constant_wide (high));

      else
	{
	  /* 64-bit mode: if HIGH is merely the sign extension of LOW,
	     only the low word needs to be built.  */
	  if (high == 0 && low >= 0)
	    return num_insns_constant_wide (low);

	  else if (high == -1 && low < 0)
	    return num_insns_constant_wide (low);

	  else if (mask64_operand (op, mode))
	    return 2;

	  else if (low == 0)
	    return num_insns_constant_wide (high) + 1;

	  else
	    return (num_insns_constant_wide (high)
		    + num_insns_constant_wide (low) + 1);
	}
    }

  else
    abort ();
}
1134
/* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
   register with one instruction per word.  We only do this if we can
   safely read CONST_DOUBLE_{LOW,HIGH}.  */

int
easy_fp_constant (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != CONST_DOUBLE
      || GET_MODE (op) != mode
      || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
    return 0;

  /* Consider all constants with -msoft-float to be easy.  */
  if (TARGET_SOFT_FLOAT && mode != DImode)
    return 1;

  /* If we are using V.4 style PIC, consider all constants to be hard.  */
  if (flag_pic && DEFAULT_ABI == ABI_V4)
    return 0;

#ifdef TARGET_RELOCATABLE
  /* Similarly if we are using -mrelocatable, consider all constants
     to be hard.  */
  if (TARGET_RELOCATABLE)
    return 0;
#endif

  if (mode == DFmode)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      /* Easy iff each 32-bit target word of the double can be loaded
	 with a single instruction.  */
      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      return (num_insns_constant_wide ((HOST_WIDE_INT)k[0]) == 1
	      && num_insns_constant_wide ((HOST_WIDE_INT)k[1]) == 1);
    }

  else if (mode == SFmode)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      return num_insns_constant_wide (l) == 1;
    }

  else if (mode == DImode)
    /* NOTE: the GET_CODE test here is redundant (already established
       above); kept for byte-identity.  */
    return ((TARGET_POWERPC64
	     && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
	    || (num_insns_constant (op, DImode) <= 2));

  else if (mode == SImode)
    return 1;
  else
    abort ();
}
1197
1198 /* Return 1 if the operand is a CONST_INT and can be put into a
1199 register with one instruction. */
1200
1201 static int
1202 easy_vector_constant (op)
1203 rtx op;
1204 {
1205 rtx elt;
1206 int units, i;
1207
1208 if (GET_CODE (op) != CONST_VECTOR)
1209 return 0;
1210
1211 units = CONST_VECTOR_NUNITS (op);
1212
1213 /* We can generate 0 easily. Look for that. */
1214 for (i = 0; i < units; ++i)
1215 {
1216 elt = CONST_VECTOR_ELT (op, i);
1217
1218 /* We could probably simplify this by just checking for equality
1219 with CONST0_RTX for the current mode, but let's be safe
1220 instead. */
1221
1222 if (GET_CODE (elt) == CONST_INT && INTVAL (elt) != 0)
1223 return 0;
1224
1225 if (GET_CODE (elt) == CONST_DOUBLE
1226 && (CONST_DOUBLE_LOW (elt) != 0
1227 || CONST_DOUBLE_HIGH (elt) != 0))
1228 return 0;
1229 }
1230
1231 /* We could probably generate a few other constants trivially, but
1232 gcc doesn't generate them yet. FIXME later. */
1233 return 0;
1234 }
1235
1236 /* Return 1 if the operand is the constant 0. This works for scalars
1237 as well as vectors. */
1238 int
1239 zero_constant (op, mode)
1240 rtx op;
1241 enum machine_mode mode;
1242 {
1243 return op == CONST0_RTX (mode);
1244 }
1245
1246 /* Return 1 if the operand is 0.0. */
1247 int
1248 zero_fp_constant (op, mode)
1249 rtx op;
1250 enum machine_mode mode;
1251 {
1252 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1253 }
1254
1255 /* Return 1 if the operand is in volatile memory. Note that during
1256 the RTL generation phase, memory_operand does not return TRUE for
1257 volatile memory references. So this function allows us to
1258 recognize volatile references where its safe. */
1259
1260 int
1261 volatile_mem_operand (op, mode)
1262 rtx op;
1263 enum machine_mode mode;
1264 {
1265 if (GET_CODE (op) != MEM)
1266 return 0;
1267
1268 if (!MEM_VOLATILE_P (op))
1269 return 0;
1270
1271 if (mode != GET_MODE (op))
1272 return 0;
1273
1274 if (reload_completed)
1275 return memory_operand (op, mode);
1276
1277 if (reload_in_progress)
1278 return strict_memory_address_p (mode, XEXP (op, 0));
1279
1280 return memory_address_p (mode, XEXP (op, 0));
1281 }
1282
1283 /* Return 1 if the operand is an offsettable memory operand. */
1284
1285 int
1286 offsettable_mem_operand (op, mode)
1287 rtx op;
1288 enum machine_mode mode;
1289 {
1290 return ((GET_CODE (op) == MEM)
1291 && offsettable_address_p (reload_completed || reload_in_progress,
1292 mode, XEXP (op, 0)));
1293 }
1294
1295 /* Return 1 if the operand is either an easy FP constant (see above) or
1296 memory. */
1297
1298 int
1299 mem_or_easy_const_operand (op, mode)
1300 rtx op;
1301 enum machine_mode mode;
1302 {
1303 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1304 }
1305
1306 /* Return 1 if the operand is either a non-special register or an item
1307 that can be used as the operand of a `mode' add insn. */
1308
1309 int
1310 add_operand (op, mode)
1311 rtx op;
1312 enum machine_mode mode;
1313 {
1314 if (GET_CODE (op) == CONST_INT)
1315 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1316 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1317
1318 return gpc_reg_operand (op, mode);
1319 }
1320
1321 /* Return 1 if OP is a constant but not a valid add_operand. */
1322
1323 int
1324 non_add_cint_operand (op, mode)
1325 rtx op;
1326 enum machine_mode mode ATTRIBUTE_UNUSED;
1327 {
1328 return (GET_CODE (op) == CONST_INT
1329 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1330 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1331 }
1332
/* Return 1 if the operand is a non-special register or a constant that
   can be used as the operand of an OR or XOR insn on the RS/6000,
   i.e. a value whose significant bits fit entirely in either the low
   (ori/xori) or high (oris/xoris) 16-bit halfword.  */

int
logical_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* OPH is only set and examined on the CONST_DOUBLE path; OPL is set
     on both constant paths and tested at the end.  */
  HOST_WIDE_INT opl, oph;

  if (gpc_reg_operand (op, mode))
    return 1;

  if (GET_CODE (op) == CONST_INT)
    {
      opl = INTVAL (op) & GET_MODE_MASK (mode);

#if HOST_BITS_PER_WIDE_INT <= 32
      /* On a narrow host a negative masked value would imply set bits
	 beyond the host word; reject it.  */
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
	return 0;
#endif
    }
  else if (GET_CODE (op) == CONST_DOUBLE)
    {
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	abort ();

      opl = CONST_DOUBLE_LOW (op);
      oph = CONST_DOUBLE_HIGH (op);
      if (oph != 0)
	return 0;
    }
  else
    return 0;

  /* All significant bits in one 16-bit halfword.  */
  return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
	  || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
}
1371
1372 /* Return 1 if C is a constant that is not a logical operand (as
1373 above), but could be split into one. */
1374
1375 int
1376 non_logical_cint_operand (op, mode)
1377 rtx op;
1378 enum machine_mode mode;
1379 {
1380 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1381 && ! logical_operand (op, mode)
1382 && reg_or_logical_cint_operand (op, mode));
1383 }
1384
/* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
   RS/6000.  It is if there are no more than two 1->0 or 0->1 transitions.
   Reject all ones and all zeros, since these should have been optimized
   away and confuse the making of MB and ME.  */

int
mask_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c, lsb;

  if (GET_CODE (op) != CONST_INT)
    return 0;

  c = INTVAL (op);

  /* Fail in 64-bit mode if the mask wraps around because the upper
     32-bits of the mask will all be 1s, contrary to GCC's internal view.  */
  if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
    return 0;

  /* We don't change the number of transitions by inverting,
     so make sure we start with the LS bit zero.  */
  if (c & 1)
    c = ~c;

  /* Reject all zeros or all ones.  */
  if (c == 0)
    return 0;

  /* Find the first transition.  (C & -C isolates the lowest set bit.)  */
  lsb = c & -c;

  /* Invert to look for a second transition.  */
  c = ~c;

  /* Erase first transition.  (-LSB has ones at and above the LSB
     position, so this clears everything below the first transition.)  */
  c &= -lsb;

  /* Find the second transition (if any).  */
  lsb = c & -c;

  /* Match if all the bits above are 1's (or c is zero).  */
  return c == -lsb;
}
1431
/* Return 1 if the operand is a constant that is a PowerPC64 mask.
   It is if there are no more than one 1->0 or 0->1 transitions.
   Reject all ones and all zeros, since these should have been optimized
   away and confuse the making of MB and ME.  */

int
mask64_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb;

      /* We don't change the number of transitions by inverting,
	 so make sure we start with the LS bit zero.  */
      c = INTVAL (op);
      if (c & 1)
	c = ~c;

      /* Reject all zeros or all ones.  */
      if (c == 0)
	return 0;

      /* Find the transition, and check that all bits above are 1's.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (op) == CONST_DOUBLE
	   && (mode == VOIDmode || mode == DImode))
    {
      /* The 64-bit value is split across LOW and HIGH only on narrow
	 hosts; HIGH is deliberately left unset (and never read, thanks
	 to the short-circuit tests below) when HOST_WIDE_INT already
	 holds 64 bits.  */
      HOST_WIDE_INT low, high, lsb;

      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (op);

      low = CONST_DOUBLE_LOW (op);
      if (low & 1)
	{
	  if (HOST_BITS_PER_WIDE_INT < 64)
	    high = ~high;
	  low = ~low;
	}

      if (low == 0)
	{
	  /* All-zero (or all-one before inversion) is rejected; the
	     single transition must be in the high word.  */
	  if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
	    return 0;

	  lsb = high & -high;
	  return high == -lsb;
	}

      /* Transition in the low word; the high word must be all ones.  */
      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
1491
1492 /* Return 1 if the operand is either a non-special register or a constant
1493 that can be used as the operand of a PowerPC64 logical AND insn. */
1494
1495 int
1496 and64_operand (op, mode)
1497 rtx op;
1498 enum machine_mode mode;
1499 {
1500 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1501 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1502
1503 return (logical_operand (op, mode) || mask64_operand (op, mode));
1504 }
1505
1506 /* Return 1 if the operand is either a non-special register or a
1507 constant that can be used as the operand of an RS/6000 logical AND insn. */
1508
1509 int
1510 and_operand (op, mode)
1511 rtx op;
1512 enum machine_mode mode;
1513 {
1514 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1515 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1516
1517 return (logical_operand (op, mode) || mask_operand (op, mode));
1518 }
1519
1520 /* Return 1 if the operand is a general register or memory operand. */
1521
1522 int
1523 reg_or_mem_operand (op, mode)
1524 rtx op;
1525 enum machine_mode mode;
1526 {
1527 return (gpc_reg_operand (op, mode)
1528 || memory_operand (op, mode)
1529 || volatile_mem_operand (op, mode));
1530 }
1531
/* Return 1 if the operand is a general register or memory operand without
   pre_inc or pre_dec which produces invalid form of PowerPC lwa
   instruction.  (lwa is a DS-form instruction: its displacement must be
   a multiple of 4, and it has no update forms.)  */

int
lwa_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner = op;

  /* After reload look through SUBREGs to the underlying operand.  */
  if (reload_completed && GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return gpc_reg_operand (inner, mode)
    || (memory_operand (inner, mode)
	/* No update forms for lwa.  */
	&& GET_CODE (XEXP (inner, 0)) != PRE_INC
	&& GET_CODE (XEXP (inner, 0)) != PRE_DEC
	/* Any constant displacement must be word-aligned.  */
	&& (GET_CODE (XEXP (inner, 0)) != PLUS
	    || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
	    || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
}
1554
1555 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1556 to CALL. This is a SYMBOL_REF or a pseudo-register, which will be
1557 forced to lr. */
1558
1559 int
1560 call_operand (op, mode)
1561 rtx op;
1562 enum machine_mode mode;
1563 {
1564 if (mode != VOIDmode && GET_MODE (op) != mode)
1565 return 0;
1566
1567 return (GET_CODE (op) == SYMBOL_REF
1568 || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER));
1569 }
1570
1571 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1572 this file and the function is not weakly defined. */
1573
1574 int
1575 current_file_function_operand (op, mode)
1576 rtx op;
1577 enum machine_mode mode ATTRIBUTE_UNUSED;
1578 {
1579 return (GET_CODE (op) == SYMBOL_REF
1580 && (SYMBOL_REF_FLAG (op)
1581 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1582 && ! DECL_WEAK (current_function_decl))));
1583 }
1584
/* Return 1 if OP is a valid input for a move insn of mode MODE.
   The checks proceed from cheapest to most specific; their order is
   significant (e.g. integer constants are accepted before the
   register-only fallback for wide modes).  */

int
input_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Memory is always valid.  */
  if (memory_operand (op, mode))
    return 1;

  /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* For floating-point, easy constants are valid.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && CONSTANT_P (op)
      && easy_fp_constant (op, mode))
    return 1;

  /* Allow any integer constant.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && (GET_CODE (op) == CONST_INT
	  || GET_CODE (op) == CONST_DOUBLE))
    return 1;

  /* For floating-point or multi-word mode, the only remaining valid type
     is a register.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    return register_operand (op, mode);

  /* The only cases left are integral modes one word or smaller (we
     do not get called for MODE_CC values).  These can be in any
     register.  */
  if (register_operand (op, mode))
    return 1;

  /* A SYMBOL_REF referring to the TOC is valid.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
    return 1;

  /* A constant pool expression (relative to the TOC) is valid */
  if (TOC_RELATIVE_EXPR_P (op))
    return 1;

  /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
     to be valid.  */
  if (DEFAULT_ABI == ABI_V4
      && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
      && small_data_operand (op, Pmode))
    return 1;

  return 0;
}
1641
/* Return 1 for an operand in small memory on V.4/eabi.
   Only meaningful for ELF targets; always 0 otherwise.  */

int
small_data_operand (op, mode)
     rtx op ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if TARGET_ELF
  rtx sym_ref;

  /* Small data only applies when -msdata=eabi or -msdata=sysv style
     addressing is in effect.  */
  if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
    return 0;

  if (DEFAULT_ABI != ABI_V4)
    return 0;

  if (GET_CODE (op) == SYMBOL_REF)
    sym_ref = op;

  /* Otherwise only (const (plus (symbol_ref) (const_int))) qualifies.  */
  else if (GET_CODE (op) != CONST
	   || GET_CODE (XEXP (op, 0)) != PLUS
	   || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
	   || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
    return 0;

  else
    {
      rtx sum = XEXP (op, 0);
      HOST_WIDE_INT summand;

      /* We have to be careful here, because it is the referenced address
	 that must be 32k from _SDA_BASE_, not just the symbol.  */
      summand = INTVAL (XEXP (sum, 1));
      if (summand < 0 || summand > g_switch_value)
	return 0;

      sym_ref = XEXP (sum, 0);
    }

  /* Small-data symbols are tagged with a leading '@' in their name
     encoding — presumably by this port's ENCODE_SECTION_INFO; verify
     against rs6000 ELF section-info code.  */
  if (*XSTR (sym_ref, 0) != '@')
    return 0;

  return 1;

#else
  return 0;
#endif
}
1690 \f
/* Recursive worker for constant_pool_expr_p and toc_relative_expr_p.
   Walk OP, returning 1 if every leaf is acceptable (a constant-pool
   SYMBOL_REF whose entry goes in the TOC, the TOC label itself, or a
   CONST_INT, combined with PLUS/MINUS/CONST).  *HAVE_SYM is set if a
   constant-pool symbol was seen, *HAVE_TOC if the TOC label was seen;
   neither is ever cleared.  */
static int
constant_pool_expr_1 (op, have_sym, have_toc)
    rtx op;
    int *have_sym;
    int *have_toc;
{
  switch (GET_CODE(op))
    {
    case SYMBOL_REF:
      if (CONSTANT_POOL_ADDRESS_P (op))
	{
	  /* Only symbols whose pool constant is emitted into the TOC
	     qualify.  */
	  if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
	    {
	      *have_sym = 1;
	      return 1;
	    }
	  else
	    return 0;
	}
      else if (! strcmp (XSTR (op, 0), toc_label_name))
	{
	  *have_toc = 1;
	  return 1;
	}
      else
	return 0;
    case PLUS:
    case MINUS:
      /* Both arms must be acceptable.  */
      return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
	      && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
    case CONST:
      return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
    case CONST_INT:
      return 1;
    default:
      return 0;
    }
}
1729
1730 int
1731 constant_pool_expr_p (op)
1732 rtx op;
1733 {
1734 int have_sym = 0;
1735 int have_toc = 0;
1736 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1737 }
1738
1739 int
1740 toc_relative_expr_p (op)
1741 rtx op;
1742 {
1743 int have_sym = 0;
1744 int have_toc = 0;
1745 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1746 }
1747
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This is used from only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was
   called.  In some cases it is useful to look at this to decide what
   needs to be done.

   MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.

   It is always safe for this function to do nothing.  It exists to
   recognize opportunities to optimize the output.

   On RS/6000, first check for the sum of a register with a constant
   integer that is out of range.  If so, generate code to add the
   constant with the low-order 16 bits masked to the register and force
   this result into another register (this can be done with `cau').
   Then generate an address of REG+(CONST&0xffff), allowing for the
   possibility of bit 16 being a one.

   Then check for the sum of a register and something not constant, try to
   load the other things into a register and return the sum.  */
rtx
rs6000_legitimize_address (x, oldx, mode)
     rtx x;
     rtx oldx ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  /* reg + out-of-range constant: split the constant into a high part
     added into a new register and a 16-bit signed low part left in
     the address.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
    {
      HOST_WIDE_INT high_int, low_int;
      rtx sum;
      high_int = INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff);
      low_int = INTVAL (XEXP (x, 1)) & 0xffff;
      /* If bit 15 of the low part is set, the 16-bit displacement will
	 be sign-extended by the hardware; compensate in the high part.  */
      if (low_int & 0x8000)
	high_int += 0x10000, low_int |= ((HOST_WIDE_INT) -1) << 16;
      sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
					 GEN_INT (high_int)), 0);
      return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
    }
  /* reg + non-constant: force the addend into a register so that
     indexed (reg+reg) addressing can be used — but only for modes
     where indexed addressing is actually legal (see
     rs6000_legitimate_address).  */
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) != CONST_INT
	   && GET_MODE_NUNITS (mode) == 1
	   && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
	   && (TARGET_POWERPC64 || mode != DImode)
	   && mode != TImode)
    {
      return gen_rtx_PLUS (Pmode, XEXP (x, 0),
			   force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
    }
  /* AltiVec memory references allow only reg or reg+reg addressing.  */
  else if (ALTIVEC_VECTOR_MODE (mode))
    {
      rtx reg;

      /* Make sure both operands are registers.  */
      if (GET_CODE (x) == PLUS)
	return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
			     force_reg (Pmode, XEXP (x, 1)));

      reg = force_reg (Pmode, x);
      return reg;
    }
  /* ELF without a TOC, non-PIC: address symbolic constants with a
     HIGH/LO_SUM pair.  */
  else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && GET_MODE_NUNITS (mode) == 1
	   && (GET_MODE_BITSIZE (mode) <= 32
	       || (TARGET_HARD_FLOAT && mode == DFmode)))
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_elf_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* Same idea for Darwin (Mach-O) without a TOC.  */
  else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
	   && ! flag_pic
	   && GET_CODE (x) != CONST_INT
	   && GET_CODE (x) != CONST_DOUBLE
	   && CONSTANT_P (x)
	   && (TARGET_HARD_FLOAT || mode != DFmode)
	   && mode != DImode
	   && mode != TImode)
    {
      rtx reg = gen_reg_rtx (Pmode);
      emit_insn (gen_macho_high (reg, (x)));
      return gen_rtx_LO_SUM (Pmode, reg, (x));
    }
  /* TOC-relative constant pool references.  */
  else if (TARGET_TOC
	   && CONSTANT_POOL_EXPR_P (x)
	   && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
    {
      return create_TOC_reference (x);
    }
  else
    return NULL_RTX;
}
1848
/* The convention appears to be to define this wherever it is used.
   With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
   is now used here.  Fall back to the mode-independent check when the
   target does not provide a mode-aware one.  */
#ifndef REG_MODE_OK_FOR_BASE_P
#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
#endif
1855
/* Our implementation of LEGITIMIZE_RELOAD_ADDRESS.  Returns a value to
   replace the input X, or the original X if no replacement is called for.
   The output parameter *WIN is 1 if the calling macro should goto WIN,
   0 if it should not.

   For RS/6000, we wish to handle large displacements off a base
   register by splitting the addend across an addiu/addis and the mem insn.
   This cuts number of extra insns needed from 3 to 1.

   On Darwin, we use this to generate code for floating point constants.
   A movsf_low is generated so we wind up with 2 instructions rather than 3.
   The Darwin code is inside #if TARGET_MACHO because only then is
   machopic_function_base_name() defined.  */
rtx
rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
    rtx x;
    enum machine_mode mode;
    int opnum;
    int type;
    int ind_levels ATTRIBUTE_UNUSED;
    int *win;
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* Reload the inner (reg + high) sum into a base register.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }

#if TARGET_MACHO
  if (DEFAULT_ABI == ABI_DARWIN && flag_pic
      && GET_CODE (x) == LO_SUM
      && GET_CODE (XEXP (x, 0)) == PLUS
      && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
      && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
    {
      /* Result of previous invocation of this function on Darwin
	 floating point constant.  */
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* reg + large constant: split into (reg + high) reloaded into a base
     register, with the sign-extended 16-bit low part left in the mem.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && !ALTIVEC_VECTOR_MODE (mode))
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
	{
	  *win = 0;
	  return x;
	}

      /* Reload the high part into a base reg; leave the low part
	 in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
				      GEN_INT (high)),
			GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#if TARGET_MACHO
  if (GET_CODE (x) == SYMBOL_REF
      && DEFAULT_ABI == ABI_DARWIN
      && !ALTIVEC_VECTOR_MODE (mode)
      && flag_pic)
    {
      /* Darwin load of floating point constant.  */
      rtx offset = gen_rtx (CONST, Pmode,
		    gen_rtx (MINUS, Pmode, x,
		    gen_rtx (SYMBOL_REF, Pmode,
			machopic_function_base_name ())));
      x = gen_rtx (LO_SUM, GET_MODE (x),
	    gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
		gen_rtx (HIGH, Pmode, offset)), offset);
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
		   opnum, (enum reload_type)type);
      *win = 1;
      return x;
    }
#endif
  /* TOC-relative constant pool reference: rewrite as a TOC load.  */
  if (TARGET_TOC
      && CONSTANT_POOL_EXPR_P (x)
      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
    {
      (x) = create_TOC_reference (x);
      *win = 1;
      return x;
    }
  *win = 0;
  return x;
}
1978
/* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
   that is a valid memory address for an instruction.
   The MODE argument is the machine mode for the MEM expression
   that wants to use this address.

   On the RS/6000, there are four valid address: a SYMBOL_REF that
   refers to a constant pool entry of an address (or the sum of it
   plus a constant), a short (16-bit signed) constant plus a register,
   the sum of two registers, or a register indirect, possibly with an
   auto-increment.  For DFmode and DImode with an constant plus register,
   we must ensure that both words are addressable or PowerPC64 with offset
   word aligned.

   For modes spanning multiple registers (DFmode in 32-bit GPRs,
   32-bit DImode, TImode), indexed addressing cannot be used because
   adjacent memory cells are accessed by adding word-sized offsets
   during assembly output.  */
int
rs6000_legitimate_address (mode, x, reg_ok_strict)
    enum machine_mode mode;
    rtx x;
    int reg_ok_strict;
{
  /* Plain register indirect.  */
  if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
    return 1;
  /* Pre-increment/decrement, when update forms are enabled.  */
  if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
      && TARGET_UPDATE
      && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
    return 1;
  /* Small-data references (V.4/eabi).  */
  if (LEGITIMATE_SMALL_DATA_P (mode, x))
    return 1;
  /* TOC-based constant pool references.  */
  if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    return 1;
  /* If not REG_OK_STRICT (before reload) let pass any stack offset.  */
  if (! reg_ok_strict
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && XEXP (x, 0) == virtual_stack_vars_rtx
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;
  /* Register plus 16-bit signed displacement.  */
  if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
    return 1;
  /* Indexed (reg + reg), except for modes split across registers
     (see the head comment).  */
  if (mode != TImode
      && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
      && (TARGET_POWERPC64 || mode != DImode)
      && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
    return 1;
  /* LO_SUM of a HIGH-loaded base.  */
  if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
    return 1;
  return 0;
}
2030 \f
/* Try to output insns to set TARGET equal to the constant C if it can
   be done in less than N insns.  Do all computations in MODE.
   Returns the place where the output has been placed if it can be
   done and the insns have been emitted.  If it would take more than N
   insns, zero is returned and no insns and emitted.

   NOTE: the K&R declaration order below differs from the parameter
   list (dest, mode, source, n); K&R matches declarations by NAME, so
   the binding is still correct — do not "fix" the order into a
   prototype without keeping the call-site argument order.  */

rtx
rs6000_emit_set_const (dest, mode, source, n)
    rtx dest, source;
    enum machine_mode mode;
    int n ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT c0, c1;

  /* Word-or-narrower modes need no decomposition: a single SET.  */
  if (mode == QImode || mode == HImode || mode == SImode)
    {
      if (dest == NULL)
	dest = gen_reg_rtx (mode);
      emit_insn (gen_rtx_SET (VOIDmode, dest, source));
      return dest;
    }

  if (GET_CODE (source) == CONST_INT)
    {
      c0 = INTVAL (source);
      c1 = -(c0 < 0);	/* sign-extend: high word is all ones iff c0 < 0 */
    }
  else if (GET_CODE (source) == CONST_DOUBLE)
    {
#if HOST_BITS_PER_WIDE_INT >= 64
      c0 = CONST_DOUBLE_LOW (source);
      c1 = -(c0 < 0);
#else
      c0 = CONST_DOUBLE_LOW (source);
      c1 = CONST_DOUBLE_HIGH (source);
#endif
    }
  else
    abort ();

  return rs6000_emit_set_long_const (dest, c0, c1);
}
2073
/* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
   fall back to a straight forward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with rs6000_emit_set_const.  */
static rtx
rs6000_emit_set_long_const (dest, c1, c2)
     rtx dest;
     HOST_WIDE_INT c1, c2;
{
  if (!TARGET_POWERPC64)
    {
      /* 32-bit target: DEST is a register pair.  Move each 32-bit
	 half into its subword; which subword is low depends on
	 endianness.  */
      rtx operand1, operand2;

      operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
					DImode);
      operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
					DImode);
      emit_move_insn (operand1, GEN_INT (c1));
      emit_move_insn (operand2, GEN_INT (c2));
    }
  else
    {
      /* 64-bit target: build the constant from its four 16-bit
	 chunks, ud1 (least significant) through ud4 (most
	 significant), using li/lis/ori/oris/sldi-style operations.  */
      HOST_WIDE_INT ud1, ud2, ud3, ud4;

      ud1 = c1 & 0xffff;
      ud2 = (c1 & 0xffff0000) >> 16;
#if HOST_BITS_PER_WIDE_INT >= 64
      /* When the host word is wide, the upper 32 bits live in C1.  */
      c2 = c1 >> 32;
#endif
      ud3 = c2 & 0xffff;
      ud4 = (c2 & 0xffff0000) >> 16;

      /* Case 1: the value fits in a sign-extended 16-bit immediate
	 (all upper chunks are a sign extension of ud1) -- one insn.  */
      if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
	  || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
	{
	  if (ud1 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) -  0x8000)));
	  else
	    emit_move_insn (dest, GEN_INT (ud1));
	}

      /* Case 2: fits in a sign-extended 32-bit value -- lis + ori.  */
      else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
	       || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
	{
	  if (ud2 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud2 << 16));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* Case 3: upper chunk is a sign extension of ud3 -- build the
	 high 32 bits, shift left 16, then OR in ud1.  */
      else if ((ud4 == 0xffff && (ud3 & 0x8000))
	       || (ud4 == 0 && ! (ud3 & 0x8000)))
	{
	  if (ud3 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud3 << 16));

	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
      /* General case: build ud4:ud3, shift left 32, OR in ud2:ud1 --
	 up to five insns.  */
      else
	{
	  if (ud4 & 0x8000)
	    emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
					   - 0x80000000));
	  else
	    emit_move_insn (dest, GEN_INT (ud4 << 16));

	  if (ud3 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));

	  emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
	  if (ud2 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
					       GEN_INT (ud2 << 16)));
	  if (ud1 != 0)
	    emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
	}
    }
  return dest;
}
2162
/* Emit a move from SOURCE to DEST in mode MODE.  Expands the special
   cases (misaligned DImode block moves, POWER single-precision
   stores, small-data and GOT references, constants that must go
   through the TOC or the constant pool) before falling back to a
   plain SET insn.  */
void
rs6000_emit_move (dest, source, mode)
     rtx dest;
     rtx source;
     enum machine_mode mode;
{
  rtx operands[2];
  operands[0] = dest;
  operands[1] = source;

  /* Sanity checks.  Check that we get CONST_DOUBLE only when we should.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      /* FIXME.  This should never happen.  */
      /* Since it seems that it does, do the safe thing and convert
	 to a CONST_INT.  */
      operands[1] =
	GEN_INT (trunc_int_for_mode (CONST_DOUBLE_LOW (operands[1]), mode));
    }
  /* A non-float CONST_DOUBLE whose high word is just the sign
     extension of the low word should have been a CONST_INT.  */
  if (GET_CODE (operands[1]) == CONST_DOUBLE
      && ! FLOAT_MODE_P (mode)
      && ((CONST_DOUBLE_HIGH (operands[1]) == 0
	   && CONST_DOUBLE_LOW (operands[1]) >= 0)
	  || (CONST_DOUBLE_HIGH (operands[1]) == -1
	      && CONST_DOUBLE_LOW (operands[1]) < 0)))
    abort ();

  /* Check if GCC is setting up a block move that will end up using FP
     registers as temporaries.  We must make sure this is acceptable.
     If the DImode access would be slow but two SImode accesses would
     not, split the move into two word-sized moves instead.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) == MEM
      && mode == DImode
      && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
	  || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
      && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
					    ? 32 : MEM_ALIGN (operands[0])))
	    || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
					       ? 32
					       : MEM_ALIGN (operands[1]))))
      && ! MEM_VOLATILE_P (operands [0])
      && ! MEM_VOLATILE_P (operands [1]))
    {
      emit_move_insn (adjust_address (operands[0], SImode, 0),
		      adjust_address (operands[1], SImode, 0));
      emit_move_insn (adjust_address (operands[0], SImode, 4),
		      adjust_address (operands[1], SImode, 4));
      return;
    }

  /* Storing to memory: make sure the source is in a register first.  */
  if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
    operands[1] = force_reg (mode, operands[1]);

  /* POWER (as opposed to PowerPC) has no single-precision store; FP
     registers may hold double-precision data, so an SFmode store from
     an FP register must be preceded by an explicit truncation.  */
  if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
      && GET_CODE (operands[0]) == MEM)
    {
      int regnum;

      if (reload_in_progress || reload_completed)
	regnum = true_regnum (operands[1]);
      else if (GET_CODE (operands[1]) == REG)
	regnum = REGNO (operands[1]);
      else
	regnum = -1;

      /* If operands[1] is a register, on POWER it may have
	 double-precision data in it, so truncate it to single
	 precision.  */
      if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
	{
	  rtx newreg;
	  newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
	  emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
	  operands[1] = newreg;
	}
    }

  /* Handle the case where reload calls us with an invalid address;
     and the case of CONSTANT_P_RTX.  */
  if (!VECTOR_MODE_P (mode)
      && (! general_operand (operands[1], mode)
	  || ! nonimmediate_operand (operands[0], mode)
	  || GET_CODE (operands[1]) == CONSTANT_P_RTX))
    {
      emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
      return;
    }

  /* FIXME:  In the long term, this switch statement should go away
     and be replaced by a sequence of tests based on things like
     mode == Pmode.  */
  switch (mode)
    {
    case HImode:
    case QImode:
      if (CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != CONST_INT)
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case TFmode:
    case DFmode:
    case SFmode:
      /* FP constants that cannot be synthesized cheaply come from
	 the constant pool.  */
      if (CONSTANT_P (operands[1])
	  && ! easy_fp_constant (operands[1], mode))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case V16QImode:
    case V8HImode:
    case V4SFmode:
    case V4SImode:
      /* AltiVec vector constants likewise, unless they are cheap to
	 materialize directly.  */
      if (CONSTANT_P (operands[1])
	  && !easy_vector_constant (operands[1]))
	operands[1] = force_const_mem (mode, operands[1]);
      break;

    case SImode:
    case DImode:
      /* Use default pattern for address of ELF small data */
      if (TARGET_ELF
	  && mode == Pmode
	  && DEFAULT_ABI == ABI_V4
	  && (GET_CODE (operands[1]) == SYMBOL_REF
	      || GET_CODE (operands[1]) == CONST)
	  && small_data_operand (operands[1], mode))
	{
	  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	  return;
	}

      /* V.4 small-model PIC: load the address through the GOT.  */
      if (DEFAULT_ABI == ABI_V4
	  && mode == Pmode && mode == SImode
	  && flag_pic == 1 && got_operand (operands[1], mode))
	{
	  emit_insn (gen_movsi_got (operands[0], operands[1]));
	  return;
	}

      /* No TOC and not PIC: build the address with a high/low pair.  */
      if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
	  && TARGET_NO_TOC && ! flag_pic
	  && mode == Pmode
	  && CONSTANT_P (operands[1])
	  && GET_CODE (operands[1]) != HIGH
	  && GET_CODE (operands[1]) != CONST_INT)
	{
	  rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));

	  /* If this is a function address on -mcall-aixdesc,
	     convert it to the address of the descriptor.  */
	  if (DEFAULT_ABI == ABI_AIX
	      && GET_CODE (operands[1]) == SYMBOL_REF
	      && XSTR (operands[1], 0)[0] == '.')
	    {
	      const char *name = XSTR (operands[1], 0);
	      rtx new_ref;
	      while (*name == '.')
		name++;
	      new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
	      CONSTANT_POOL_ADDRESS_P (new_ref)
		= CONSTANT_POOL_ADDRESS_P (operands[1]);
	      SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
	      SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
	      operands[1] = new_ref;
	    }

	  if (DEFAULT_ABI == ABI_DARWIN)
	    {
	      emit_insn (gen_macho_high (target, operands[1]));
	      emit_insn (gen_macho_low (operands[0], target, operands[1]));
	      return;
	    }

	  emit_insn (gen_elf_high (target, operands[1]));
	  emit_insn (gen_elf_low (operands[0], target, operands[1]));
	  return;
	}

      /* If this is a SYMBOL_REF that refers to a constant pool entry,
	 and we have put it in the TOC, we just need to make a TOC-relative
	 reference to it.  */
      if (TARGET_TOC
	  && GET_CODE (operands[1]) == SYMBOL_REF
	  && CONSTANT_POOL_EXPR_P (operands[1])
	  && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
					      get_pool_mode (operands[1])))
	{
	  operands[1] = create_TOC_reference (operands[1]);
	}
      else if (mode == Pmode
	       && CONSTANT_P (operands[1])
	       && ((GET_CODE (operands[1]) != CONST_INT
		    && ! easy_fp_constant (operands[1], mode))
		   || (GET_CODE (operands[1]) == CONST_INT
		       && num_insns_constant (operands[1], mode) > 2)
		   || (GET_CODE (operands[0]) == REG
		       && FP_REGNO_P (REGNO (operands[0]))))
	       && GET_CODE (operands[1]) != HIGH
	       && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
	       && ! TOC_RELATIVE_EXPR_P (operands[1]))
	{
	  /* Emit a USE operation so that the constant isn't deleted if
	     expensive optimizations are turned on because nobody
	     references it.  This should only be done for operands that
	     contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
	     This should not be done for operands that contain LABEL_REFs.
	     For now, we just handle the obvious case.  */
	  if (GET_CODE (operands[1]) != LABEL_REF)
	    emit_insn (gen_rtx_USE (VOIDmode, operands[1]));

#if TARGET_MACHO
	  /* Darwin uses a special PIC legitimizer.  */
	  if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
	    {
	      operands[1] =
		rs6000_machopic_legitimize_pic_address (operands[1], mode,
							operands[0]);
	      if (operands[0] != operands[1])
		emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
	      return;
	    }
#endif

	  /* If we are to limit the number of things we put in the TOC and
	     this is a symbol plus a constant we can add in one insn,
	     just put the symbol in the TOC and add the constant.  Don't do
	     this if reload is in progress.  */
	  if (GET_CODE (operands[1]) == CONST
	      && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
	      && GET_CODE (XEXP (operands[1], 0)) == PLUS
	      && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
	      && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
		  || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
	      && ! side_effects_p (operands[0]))
	    {
	      rtx sym =
		force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
	      rtx other = XEXP (XEXP (operands[1], 0), 1);

	      sym = force_reg (mode, sym);
	      if (mode == SImode)
		emit_insn (gen_addsi3 (operands[0], sym, other));
	      else
		emit_insn (gen_adddi3 (operands[0], sym, other));
	      return;
	    }

	  operands[1] = force_const_mem (mode, operands[1]);

	  /* If the pooled constant itself went into the TOC, reference
	     it TOC-relatively and mark the memory as unchanging.  */
	  if (TARGET_TOC
	      && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
	      && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
		get_pool_constant (XEXP (operands[1], 0)),
		get_pool_mode (XEXP (operands[1], 0))))
	    {
	      operands[1]
		= gen_rtx_MEM (mode,
			       create_TOC_reference (XEXP (operands[1], 0)));
	      set_mem_alias_set (operands[1], get_TOC_alias_set ());
	      RTX_UNCHANGING_P (operands[1]) = 1;
	    }
	}
      break;

    case TImode:
      /* TImode memory operands need a plain register address.  */
      if (GET_CODE (operands[0]) == MEM
	  && GET_CODE (XEXP (operands[0], 0)) != REG
	  && ! reload_in_progress)
	operands[0]
	  = replace_equiv_address (operands[0],
				   copy_addr_to_reg (XEXP (operands[0], 0)));

      if (GET_CODE (operands[1]) == MEM
	  && GET_CODE (XEXP (operands[1], 0)) != REG
	  && ! reload_in_progress)
	operands[1]
	  = replace_equiv_address (operands[1],
				   copy_addr_to_reg (XEXP (operands[1], 0)));
      break;

    default:
      abort ();
    }

  /* Above, we may have called force_const_mem which may have returned
     an invalid address.  If we can, fix this up; otherwise, reload will
     have to deal with it.  */
  if (GET_CODE (operands[1]) == MEM
      && ! memory_address_p (mode, XEXP (operands[1], 0))
      && ! reload_in_progress)
    operands[1] = adjust_address (operands[1], mode, 0);

  emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
  return;
}
2460 \f
/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.

   For incoming args we set the number of arguments in the prototype large
   so we never return a PARALLEL.  */

void
init_cumulative_args (cum, fntype, libname, incoming)
     CUMULATIVE_ARGS *cum;
     tree fntype;
     rtx libname ATTRIBUTE_UNUSED;
     int incoming;
{
  static CUMULATIVE_ARGS zero_cumulative;

  /* Zero everything, then set each field to its starting value.  */
  *cum = zero_cumulative;
  cum->words = 0;
  cum->fregno = FP_ARG_MIN_REG;
  cum->vregno = ALTIVEC_ARG_MIN_REG;
  cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
  cum->call_cookie = CALL_NORMAL;
  cum->sysv_gregno = GP_ARG_MIN_REG;

  if (incoming)
    cum->nargs_prototype = 1000;		/* don't return a PARALLEL */

  else if (cum->prototype)
    /* Count the prototyped arguments; the adjustment accounts for the
       hidden return-pointer argument when the value is returned in
       memory.  */
    cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
			    + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
			       || RETURN_IN_MEMORY (TREE_TYPE (fntype))));

  else
    cum->nargs_prototype = 0;

  cum->orig_nargs = cum->nargs_prototype;

  /* Check for longcall's */
  if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
    cum->call_cookie = CALL_LONG;

  if (TARGET_DEBUG_ARG)
    {
      fprintf (stderr, "\ninit_cumulative_args:");
      if (fntype)
	{
	  tree ret_type = TREE_TYPE (fntype);
	  fprintf (stderr, " ret code = %s,",
		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
	}

      if (cum->call_cookie & CALL_LONG)
	fprintf (stderr, " longcall,");

      fprintf (stderr, " proto = %d, nargs = %d\n",
	       cum->prototype, cum->nargs_prototype);
    }
}
2519 \f
2520 /* If defined, a C expression which determines whether, and in which
2521 direction, to pad out an argument with extra space. The value
2522 should be of type `enum direction': either `upward' to pad above
2523 the argument, `downward' to pad below, or `none' to inhibit
2524 padding.
2525
2526 For the AIX ABI structs are always stored left shifted in their
2527 argument slot. */
2528
2529 enum direction
2530 function_arg_padding (mode, type)
2531 enum machine_mode mode;
2532 tree type;
2533 {
2534 if (type != 0 && AGGREGATE_TYPE_P (type))
2535 return upward;
2536
2537 /* This is the default definition. */
2538 return (! BYTES_BIG_ENDIAN
2539 ? upward
2540 : ((mode == BLKmode
2541 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2542 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2543 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2544 ? downward : upward));
2545 }
2546
2547 /* If defined, a C expression that gives the alignment boundary, in bits,
2548 of an argument with the specified mode and type. If it is not defined,
2549 PARM_BOUNDARY is used for all arguments.
2550
2551 V.4 wants long longs to be double word aligned. */
2552
2553 int
2554 function_arg_boundary (mode, type)
2555 enum machine_mode mode;
2556 tree type ATTRIBUTE_UNUSED;
2557 {
2558 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2559 return 64;
2560 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2561 return 128;
2562 else
2563 return PARM_BOUNDARY;
2564 }
2565 \f
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  cum->nargs_prototype--;

  /* AltiVec vectors: consume a vector register while any remain (and
     the call is prototyped); otherwise the argument goes on the
     stack.  */
  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
	cum->vregno++;
      else
	cum->words += RS6000_ARG_SIZE (mode, type);
    }
  else if (DEFAULT_ABI == ABI_V4)
    {
      if (TARGET_HARD_FLOAT
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    cum->fregno++;
	  else
	    {
	      /* DFmode on the stack is doubleword aligned; the `& 1'
		 skips the odd word if necessary.  */
	      if (mode == DFmode)
	        cum->words += cum->words & 1;
	      cum->words += RS6000_ARG_SIZE (mode, type);
	    }
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long is put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long is not split between registers and stack.  */
	  if (gregno + n_words - 1 > GP_ARG_MAX_REG)
	    {
	      /* Long long is aligned on the stack.  */
	      if (n_words == 2)
		cum->words += cum->words & 1;
	      cum->words += n_words;
	    }

	  /* Note: continuing to accumulate gregno past when we've started
	     spilling to the stack indicates the fact that we've started
	     spilling to the stack to expand_builtin_saveregs.  */
	  cum->sysv_gregno = gregno + n_words;
	}

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
		   cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
	  fprintf (stderr, "mode = %4s, named = %d\n",
		   GET_MODE_NAME (mode), named);
	}
    }
  else
    {
      /* AIX/Darwin ABI: arguments accumulate in stack words; ALIGN
	 skips a word when a doubleword-aligned argument lands on an
	 odd word offset in 32-bit mode.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;

      cum->words += align + RS6000_ARG_SIZE (mode, type);

      /* FP arguments also consume an FP register.  */
      if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
	cum->fregno++;

      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
		   cum->words, cum->fregno);
	  fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
		   cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
	  fprintf (stderr, "named = %d, align = %d\n", named, align);
	}
    }
}
2661 \f
/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).

   On RS/6000 the first eight words of non-FP are normally in registers
   and the rest are pushed.  Under AIX, the first 13 FP args are in registers.
   Under V.4, the first 8 FP args are in registers.

   If this is floating-point and no prototype is specified, we use
   both an FP and integer register (or possibly FP reg and stack).  Library
   functions (when TYPE is zero) always have the proper types for args,
   so we can pass the FP value just in one register.  emit_library_function
   doesn't support PARALLEL anyway.  */

struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type;
     int named;
{
  enum rs6000_abi abi = DEFAULT_ABI;

  /* Return a marker to indicate whether CR1 needs to set or clear the
     bit that V.4 uses to say fp args were passed in registers.
     Assume that we don't need the marker for software floating point,
     or compiler generated library calls.  */
  if (mode == VOIDmode)
    {
      if (abi == ABI_V4
	  && TARGET_HARD_FLOAT
	  && cum->nargs_prototype < 0
	  && type && (cum->prototype || TARGET_NO_PROTOTYPE))
	{
	  return GEN_INT (cum->call_cookie
			  | ((cum->fregno == FP_ARG_MIN_REG)
			     ? CALL_V4_SET_FP_ARGS
			     : CALL_V4_CLEAR_FP_ARGS));
	}

      return GEN_INT (cum->call_cookie);
    }

  /* AltiVec vectors go in a vector register while any remain.  */
  if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
    {
      if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
	return gen_rtx_REG (mode, cum->vregno);
      else
	return NULL;
    }
  else if (abi == ABI_V4)
    {
      if (TARGET_HARD_FLOAT
	  && (mode == SFmode || mode == DFmode))
	{
	  if (cum->fregno <= FP_ARG_V4_MAX_REG)
	    return gen_rtx_REG (mode, cum->fregno);
	  else
	    return NULL;
	}
      else
	{
	  int n_words;
	  int gregno = cum->sysv_gregno;

	  /* Aggregates and IEEE quad get passed by reference.  */
	  if ((type && AGGREGATE_TYPE_P (type))
	      || mode == TFmode)
	    n_words = 1;
	  else
	    n_words = RS6000_ARG_SIZE (mode, type);

	  /* Long long is put in odd registers.  */
	  if (n_words == 2 && (gregno & 1) == 0)
	    gregno += 1;

	  /* Long long is not split between registers and stack.  */
	  if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
	    return gen_rtx_REG (mode, gregno);
	  else
	    return NULL;
	}
    }
  else
    {
      /* AIX/Darwin ABI.  ALIGN skips a word when a doubleword-aligned
	 argument would start on an odd word in 32-bit mode.  */
      int align = (TARGET_32BIT && (cum->words & 1) != 0
		   && function_arg_boundary (mode, type) == 64) ? 1 : 0;
      int align_words = cum->words + align;

      /* Variable-sized types always go on the stack.  */
      if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	return NULL_RTX;

      if (USE_FP_FOR_ARG_P (*cum, mode, type))
	{
	  if (! type
	      || ((cum->nargs_prototype > 0)
	          /* IBM AIX extended its linkage convention definition always
		     to require FP args after register save area hole on the
		     stack.  */
	          && (DEFAULT_ABI != ABI_AIX
		      || ! TARGET_XL_CALL
		      || (align_words < GP_ARG_NUM_REG))))
	    return gen_rtx_REG (mode, cum->fregno);

	  /* Unprototyped FP argument: pass it in both the FP register
	     and the corresponding GP register(s)/stack slot, expressed
	     as a PARALLEL so the caller sets up both copies.  */
	  return gen_rtx_PARALLEL (mode,
	    gen_rtvec (2,
		       gen_rtx_EXPR_LIST (VOIDmode,
				((align_words >= GP_ARG_NUM_REG)
				 ? NULL_RTX
				 : (align_words
				    + RS6000_ARG_SIZE (mode, type)
				    > GP_ARG_NUM_REG
				    /* If this is partially on the stack, then
				       we only include the portion actually
				       in registers here.  */
				    ? gen_rtx_REG (SImode,
					       GP_ARG_MIN_REG + align_words)
				    : gen_rtx_REG (mode,
					       GP_ARG_MIN_REG + align_words))),
				const0_rtx),
		       gen_rtx_EXPR_LIST (VOIDmode,
				gen_rtx_REG (mode, cum->fregno),
				const0_rtx)));
	}
      else if (align_words < GP_ARG_NUM_REG)
	return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
      else
	return NULL_RTX;
    }
}
2801 \f
2802 /* For an arg passed partly in registers and partly in memory,
2803 this is the number of registers used.
2804 For args passed entirely in registers or entirely in memory, zero. */
2805
2806 int
2807 function_arg_partial_nregs (cum, mode, type, named)
2808 CUMULATIVE_ARGS *cum;
2809 enum machine_mode mode;
2810 tree type;
2811 int named ATTRIBUTE_UNUSED;
2812 {
2813 if (DEFAULT_ABI == ABI_V4)
2814 return 0;
2815
2816 if (USE_FP_FOR_ARG_P (*cum, mode, type)
2817 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2818 {
2819 if (cum->nargs_prototype >= 0)
2820 return 0;
2821 }
2822
2823 if (cum->words < GP_ARG_NUM_REG
2824 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2825 {
2826 int ret = GP_ARG_NUM_REG - cum->words;
2827 if (ret && TARGET_DEBUG_ARG)
2828 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2829
2830 return ret;
2831 }
2832
2833 return 0;
2834 }
2835 \f
2836 /* A C expression that indicates when an argument must be passed by
2837 reference. If nonzero for an argument, a copy of that argument is
2838 made in memory and a pointer to the argument is passed instead of
2839 the argument itself. The pointer is passed in whatever way is
2840 appropriate for passing a pointer to that type.
2841
2842 Under V.4, structures and unions are passed by reference. */
2843
2844 int
2845 function_arg_pass_by_reference (cum, mode, type, named)
2846 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2847 enum machine_mode mode ATTRIBUTE_UNUSED;
2848 tree type;
2849 int named ATTRIBUTE_UNUSED;
2850 {
2851 if (DEFAULT_ABI == ABI_V4
2852 && ((type && AGGREGATE_TYPE_P (type))
2853 || mode == TFmode))
2854 {
2855 if (TARGET_DEBUG_ARG)
2856 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2857
2858 return 1;
2859 }
2860
2861 return 0;
2862 }
2863 \f
2864 /* Perform any needed actions needed for a function that is receiving a
2865 variable number of arguments.
2866
2867 CUM is as above.
2868
2869 MODE and TYPE are the mode and type of the current parameter.
2870
2871 PRETEND_SIZE is a variable that should be set to the amount of stack
2872 that must be pushed by the prolog to pretend that our caller pushed
2873 it.
2874
2875 Normally, this macro will push all remaining incoming registers on the
2876 stack and set PRETEND_SIZE to the length of the registers pushed. */
2877
2878 void
2879 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2880 CUMULATIVE_ARGS *cum;
2881 enum machine_mode mode;
2882 tree type;
2883 int *pretend_size;
2884 int no_rtl;
2885
2886 {
2887 CUMULATIVE_ARGS next_cum;
2888 int reg_size = TARGET_32BIT ? 4 : 8;
2889 rtx save_area = NULL_RTX, mem;
2890 int first_reg_offset, set;
2891 tree fntype;
2892 int stdarg_p;
2893
2894 fntype = TREE_TYPE (current_function_decl);
2895 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2896 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2897 != void_type_node));
2898
2899 /* For varargs, we do not want to skip the dummy va_dcl argument.
2900 For stdargs, we do want to skip the last named argument. */
2901 next_cum = *cum;
2902 if (stdarg_p)
2903 function_arg_advance (&next_cum, mode, type, 1);
2904
2905 if (DEFAULT_ABI == ABI_V4)
2906 {
2907 /* Indicate to allocate space on the stack for varargs save area. */
2908 /* ??? Does this really have to be located at a magic spot on the
2909 stack, or can we allocate this with assign_stack_local instead. */
2910 cfun->machine->sysv_varargs_p = 1;
2911 if (! no_rtl)
2912 save_area = plus_constant (virtual_stack_vars_rtx,
2913 - RS6000_VARARGS_SIZE);
2914
2915 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
2916 }
2917 else
2918 {
2919 first_reg_offset = next_cum.words;
2920 save_area = virtual_incoming_args_rtx;
2921 cfun->machine->sysv_varargs_p = 0;
2922
2923 if (MUST_PASS_IN_STACK (mode, type))
2924 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
2925 }
2926
2927 set = get_varargs_alias_set ();
2928 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
2929 {
2930 mem = gen_rtx_MEM (BLKmode,
2931 plus_constant (save_area,
2932 first_reg_offset * reg_size)),
2933 set_mem_alias_set (mem, set);
2934 set_mem_align (mem, BITS_PER_WORD);
2935
2936 move_block_from_reg
2937 (GP_ARG_MIN_REG + first_reg_offset, mem,
2938 GP_ARG_NUM_REG - first_reg_offset,
2939 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
2940
2941 /* ??? Does ABI_V4 need this at all? */
2942 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
2943 }
2944
2945 /* Save FP registers if needed. */
2946 if (DEFAULT_ABI == ABI_V4
2947 && TARGET_HARD_FLOAT && ! no_rtl
2948 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
2949 {
2950 int fregno = next_cum.fregno;
2951 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
2952 rtx lab = gen_label_rtx ();
2953 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
2954
2955 emit_jump_insn (gen_rtx_SET (VOIDmode,
2956 pc_rtx,
2957 gen_rtx_IF_THEN_ELSE (VOIDmode,
2958 gen_rtx_NE (VOIDmode, cr1,
2959 const0_rtx),
2960 gen_rtx_LABEL_REF (VOIDmode, lab),
2961 pc_rtx)));
2962
2963 while (fregno <= FP_ARG_V4_MAX_REG)
2964 {
2965 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
2966 set_mem_alias_set (mem, set);
2967 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
2968 fregno++;
2969 off += 8;
2970 }
2971
2972 emit_label (lab);
2973 }
2974 }
2975
/* Create the va_list data type.

   For V.4 this is a four-field record (gpr/fpr counters, overflow
   area pointer, register save area pointer) wrapped in a one-element
   array so that `va_list' has array-decays-to-pointer semantics.  */

tree
rs6000_build_va_list ()
{
  tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;

  /* For AIX, prefer 'char *' because that's what the system
     header files like.  */
  if (DEFAULT_ABI != ABI_V4)
    return build_pointer_type (char_type_node);

  record = make_lang_type (RECORD_TYPE);
  type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);

  /* gpr/fpr count how many argument registers have been used.  */
  f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
		      unsigned_char_type_node);
  f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
		      unsigned_char_type_node);
  /* overflow_arg_area points at arguments passed on the stack;
     reg_save_area points at the prologue's register spill block.  */
  f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
		      ptr_type_node);
  f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
		      ptr_type_node);

  DECL_FIELD_CONTEXT (f_gpr) = record;
  DECL_FIELD_CONTEXT (f_fpr) = record;
  DECL_FIELD_CONTEXT (f_ovf) = record;
  DECL_FIELD_CONTEXT (f_sav) = record;

  /* Chain the fields in declaration order and lay out the record.  */
  TREE_CHAIN (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_gpr;
  TREE_CHAIN (f_gpr) = f_fpr;
  TREE_CHAIN (f_fpr) = f_ovf;
  TREE_CHAIN (f_ovf) = f_sav;

  layout_type (record);

  /* The correct type is an array type of one element.  */
  return build_array_type (record, build_index_type (size_zero_node));
}
3017
/* Implement va_start.

   For V.4, initialize the four va_list fields from the current
   function's argument bookkeeping; other ABIs use the standard
   pointer-bump expansion.  */

void
rs6000_va_start (stdarg_p, valist, nextarg)
     int stdarg_p;
     tree valist;
     rtx nextarg;
{
  HOST_WIDE_INT words, n_gpr, n_fpr;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;

  /* Only SVR4 needs something special.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      std_expand_builtin_va_start (stdarg_p, valist, nextarg);
      return;
    }

  /* Walk the field chain built by rs6000_build_va_list.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  /* Build COMPONENT_REFs for each field of the (dereferenced) valist.  */
  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  /* Count number of gp and fp argument registers used.  */
  words = current_function_args_info.words;
  n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
  n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;

  if (TARGET_DEBUG_ARG)
    {
      fputs ("va_start: words = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
      fputs (", n_gpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
      fputs (", n_fpr = ", stderr);
      fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
      putc ('\n', stderr);
    }

  /* Store the used-register counts into the gpr and fpr fields.  */
  t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the overflow area.  */
  t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
  if (words != 0)
    t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
	       build_int_2 (words * UNITS_PER_WORD, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* Find the register save area.  */
  t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
  t = build (PLUS_EXPR, TREE_TYPE (sav), t,
	     build_int_2 (-RS6000_VARARGS_SIZE, -1));
  t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
3089
/* Implement va_arg.  Emit the rtl that fetches the next variadic
   argument of TYPE from the va_list VALIST, advancing VALIST past it.
   Returns an rtx holding the *address* from which the argument should
   be loaded.  */

rtx
rs6000_va_arg (valist, type)
     tree valist, type;
{
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, reg, t, u;
  int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
  rtx lab_false, lab_over, addr_rtx, r;

  /* For AIX, the rule is that structures are passed left-aligned in
     their stack slot.  However, GCC does not presently do this:
     structures which are the same size as integer types are passed
     right-aligned, as if they were in fact integers.  This only
     matters for structures of size 1 or 2, or 4 when TARGET_64BIT.  */
  if (DEFAULT_ABI != ABI_V4)
    {
      /* Non-V4 ABIs: everything lives in the memory argument area, so
	 va_arg is just pointer arithmetic on the single va_list
	 pointer.  */
      HOST_WIDE_INT align, rounded_size;
      enum machine_mode mode;
      tree addr_tree;

      /* Compute the rounded size of the type.  */
      align = PARM_BOUNDARY / BITS_PER_UNIT;
      rounded_size = (((int_size_in_bytes (type) + align - 1) / align)
		      * align);

      addr_tree = valist;

      mode = TYPE_MODE (type);
      if (mode != BLKmode)
	{
	  /* Small non-BLKmode values sit right-justified in their
	     slot; skip the padding so ADDR_TREE points at the value
	     itself.  */
	  HOST_WIDE_INT adj;
	  adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
	  if (rounded_size > align)
	    adj = rounded_size;

	  addr_tree = build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
			     build_int_2 (rounded_size - adj, 0));
	}

      addr_rtx = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
      addr_rtx = copy_to_reg (addr_rtx);

      /* Compute new value for AP.  */
      t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
		 build (PLUS_EXPR, TREE_TYPE (valist), valist,
			build_int_2 (rounded_size, 0)));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

      return addr_rtx;
    }

  /* V4 ABI: the va_list record has four fields -- the counts of gp and
     fp registers consumed so far, a pointer to the stack overflow
     area, and a pointer to the register save area.  */
  f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
  f_fpr = TREE_CHAIN (f_gpr);
  f_ovf = TREE_CHAIN (f_fpr);
  f_sav = TREE_CHAIN (f_ovf);

  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
  gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
  fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
  ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
  sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);

  size = int_size_in_bytes (type);
  rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  /* Classify the argument: which register file it uses, how many
     registers, and where its save slots start within the register
     save area (SAV_OFS) and how wide each slot is (SAV_SCALE).  */
  if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
    {
      /* Aggregates and long doubles are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
      sav_ofs = 0;
      sav_scale = 4;
      size = rsize = UNITS_PER_WORD;
    }
  else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
    {
      /* FP args go in FP registers, if present.  The fp save slots
	 (8 bytes each) follow the eight 4-byte gp slots.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 8*4;
      sav_scale = 8;
    }
  else
    {
      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = rsize;
      sav_ofs = 0;
      sav_scale = 4;
    }

  /* Pull the value out of the saved registers ... */

  lab_false = gen_label_rtx ();
  lab_over = gen_label_rtx ();
  addr_rtx = gen_reg_rtx (Pmode);

  /* Eight argument registers per file; if the counter says too few
     remain, fall through to the overflow (stack) path.  */
  emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
			   GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
			   lab_false);

  /* Long long is aligned in the registers.  */
  if (n_reg > 1)
    {
      u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
		 build_int_2 (n_reg - 1, 0));
      u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
      u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
      TREE_SIDE_EFFECTS (u) = 1;
      expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  if (sav_ofs)
    t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
  else
    t = sav;

  /* addr = sav + sav_ofs + reg * sav_scale, bumping reg by n_reg as a
     side effect of the post-increment.  */
  u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, build_int_2 (n_reg, 0));
  TREE_SIDE_EFFECTS (u) = 1;

  u = build1 (CONVERT_EXPR, integer_type_node, u);
  TREE_SIDE_EFFECTS (u) = 1;

  u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
  TREE_SIDE_EFFECTS (u) = 1;

  t = build (PLUS_EXPR, ptr_type_node, t, u);
  TREE_SIDE_EFFECTS (t) = 1;

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  emit_jump_insn (gen_jump (lab_over));
  emit_barrier ();
  emit_label (lab_false);

  /* ... otherwise out of the overflow area.  */

  /* Make sure we don't find reg 7 for the next int arg.  */
  if (n_reg > 1)
    {
      t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }

  /* Care for on-stack alignment if needed.  */
  if (rsize <= 1)
    t = ovf;
  else
    {
      /* Round the overflow pointer up to an 8-byte boundary.  */
      t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (7, 0));
      t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-8, -1));
    }
  t = save_expr (t);

  r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);

  /* Advance the overflow pointer past the argument.  */
  t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
  t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (lab_over);

  /* By-reference arguments: what we computed is the address of a
     pointer to the value, so load through it once more.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
3273
3274 /* Builtins. */
3275
/* Register the machine-dependent builtin NAME with type TYPE and
   builtin code CODE, but only if the target_flags bits in MASK
   (e.g. MASK_ALTIVEC) are enabled for this compilation.  */
#define def_builtin(MASK, NAME, TYPE, CODE)			\
do {								\
  if ((MASK) & target_flags)					\
    builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL); \
} while (0)
3281
/* One entry of the builtin tables below: the target_flags mask that
   must be set for the builtin to exist, the insn pattern used to
   expand it, its user-visible name, and its rs6000 builtin code.  */
struct builtin_description
{
  const unsigned int mask;		/* Required target_flags bits.  */
  const enum insn_code icode;		/* Insn used to expand the builtin.  */
  const char *const name;		/* User-visible builtin name.  */
  const enum rs6000_builtins code;	/* rs6000 builtin enumerator.  */
};
3289
/* Simple ternary operations: VECd = foo (VECa, VECb, VECc).  Each
   entry maps one three-operand AltiVec builtin onto the insn pattern
   that expands it.  */

static const struct builtin_description bdesc_3arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
  { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
};
3318
/* DST operations: void foo (void *, const int, const char).  The
   AltiVec data-stream-touch prefetch builtins.  */

static const struct builtin_description bdesc_dst[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
  { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
  { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
};
3328
3329 /* Simple binary operations: VECc = foo (VECa, VECb). */
3330
3331 static const struct builtin_description bdesc_2arg[] =
3332 {
3333 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3334 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3335 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3336 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3337 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3338 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3339 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3340 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3341 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3342 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3343 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3344 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3345 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3346 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3347 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3348 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3349 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3350 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3351 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3352 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3353 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3354 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3355 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3356 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3357 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3358 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3359 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3360 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3361 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3362 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3363 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3364 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3365 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3366 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3367 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3368 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3369 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3370 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3371 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3372 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3373 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3374 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3375 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3376 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3377 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3378 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3379 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3380 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3381 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3382 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3383 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3384 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3385 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3386 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3387 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3388 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3389 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3390 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3391 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3392 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3393 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3394 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3395 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3396 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3397 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3398 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3399 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3400 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3401 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3402 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3403 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3404 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3405 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3406 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3407 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3408 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3409 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3410 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3411 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3412 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3413 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3414 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3415 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3416 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3417 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3418 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3419 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3420 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3421 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3422 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3423 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3424 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3425 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3426 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3427 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3428 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3429 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3430 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3431 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3432 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3433 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3434 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3435 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3436 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3437 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3438 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3439 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3440 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3441 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3442 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3443 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3444 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3445 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3446 };
3447
/* AltiVec predicates.  Like struct builtin_description, plus the
   assembler opcode string that is smuggled into the insn as a
   SYMBOL_REF operand (see altivec_expand_predicate_builtin).  */

struct builtin_description_predicates
{
  const unsigned int mask;		/* Required target_flags bits.  */
  const enum insn_code icode;		/* Generic predicate insn per mode.  */
  const char *opcode;			/* Assembler mnemonic, e.g. "*vcmpbfp.".  */
  const char *const name;		/* User-visible builtin name.  */
  const enum rs6000_builtins code;	/* rs6000 builtin enumerator.  */
};
3458
/* Table of the AltiVec vec_all_*/vec_any_* predicate builtins.  All
   predicates of a given vector mode share one insn pattern; the
   opcode string selects the actual compare instruction.  */
static const struct builtin_description_predicates bdesc_altivec_preds[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
  { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
};
3475
/* ABS* operations: both the plain absolute-value patterns and the
   saturating abss variants.  These insns need two scratch registers
   (see altivec_expand_abs_builtin).  */

static const struct builtin_description bdesc_abs[] =
{
  { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
  { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
  { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
};
3488
/* Simple unary operations: VECb = foo (unsigned literal) or VECb =
   foo (VECa).  The vspltis* entries take an immediate rather than a
   vector operand.  */

static const struct builtin_description bdesc_1arg[] =
{
  { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
  { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
  { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
  { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
};
3512
3513 static rtx
3514 altivec_expand_unop_builtin (icode, arglist, target)
3515 enum insn_code icode;
3516 tree arglist;
3517 rtx target;
3518 {
3519 rtx pat;
3520 tree arg0 = TREE_VALUE (arglist);
3521 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3522 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3523 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3524
3525 /* If we got invalid arguments bail out before generating bad rtl. */
3526 if (arg0 == error_mark_node)
3527 return NULL_RTX;
3528
3529 if (target == 0
3530 || GET_MODE (target) != tmode
3531 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3532 target = gen_reg_rtx (tmode);
3533
3534 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3535 op0 = copy_to_mode_reg (mode0, op0);
3536
3537 pat = GEN_FCN (icode) (target, op0);
3538 if (! pat)
3539 return 0;
3540 emit_insn (pat);
3541
3542 return target;
3543 }
3544
3545 static rtx
3546 altivec_expand_abs_builtin (icode, arglist, target)
3547 enum insn_code icode;
3548 tree arglist;
3549 rtx target;
3550 {
3551 rtx pat, scratch1, scratch2;
3552 tree arg0 = TREE_VALUE (arglist);
3553 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3554 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3555 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3556
3557 /* If we have invalid arguments, bail out before generating bad rtl. */
3558 if (arg0 == error_mark_node)
3559 return NULL_RTX;
3560
3561 if (target == 0
3562 || GET_MODE (target) != tmode
3563 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3564 target = gen_reg_rtx (tmode);
3565
3566 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3567 op0 = copy_to_mode_reg (mode0, op0);
3568
3569 scratch1 = gen_reg_rtx (mode0);
3570 scratch2 = gen_reg_rtx (mode0);
3571
3572 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
3573 if (! pat)
3574 return 0;
3575 emit_insn (pat);
3576
3577 return target;
3578 }
3579
3580 static rtx
3581 altivec_expand_binop_builtin (icode, arglist, target)
3582 enum insn_code icode;
3583 tree arglist;
3584 rtx target;
3585 {
3586 rtx pat;
3587 tree arg0 = TREE_VALUE (arglist);
3588 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3589 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3590 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3591 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3592 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3593 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3594
3595 /* If we got invalid arguments bail out before generating bad rtl. */
3596 if (arg0 == error_mark_node || arg1 == error_mark_node)
3597 return NULL_RTX;
3598
3599 if (target == 0
3600 || GET_MODE (target) != tmode
3601 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3602 target = gen_reg_rtx (tmode);
3603
3604 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3605 op0 = copy_to_mode_reg (mode0, op0);
3606 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3607 op1 = copy_to_mode_reg (mode1, op1);
3608
3609 pat = GEN_FCN (icode) (target, op0, op1);
3610 if (! pat)
3611 return 0;
3612 emit_insn (pat);
3613
3614 return target;
3615 }
3616
/* Expand an AltiVec predicate builtin (vec_all_*/vec_any_*).  ICODE
   is the per-mode generic predicate insn; OPCODE is the assembler
   mnemonic for the actual compare, passed into the insn as a
   SYMBOL_REF operand.  ARGLIST holds the CR6 selector (a constant
   0..3) followed by the two vectors to compare.  Returns an SImode
   rtx holding the 0/1 predicate result, or NULL on error.  */
static rtx
altivec_expand_predicate_builtin (icode, opcode, arglist, target)
     enum insn_code icode;
     const char *opcode;
     tree arglist;
     rtx target;
{
  rtx pat, scratch;
  tree cr6_form = TREE_VALUE (arglist);
  tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode tmode = SImode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  int cr6_form_int;

  /* The CR6 selector must be a compile-time constant.  */
  if (TREE_CODE (cr6_form) != INTEGER_CST)
    {
      error ("argument 1 of __builtin_altivec_predicate must be a constant");
      return NULL_RTX;
    }
  else
    cr6_form_int = TREE_INT_CST_LOW (cr6_form);

  /* Both vector operands of the generic predicate insn share a mode.  */
  if (mode0 != mode1)
    abort ();

  /* If we have invalid arguments, bail out before generating bad rtl.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return NULL_RTX;

  if (target == 0
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  /* The vector compare result itself is discarded into SCRATCH; only
     the CR6 bits it sets are examined below.  */
  scratch = gen_reg_rtx (mode0);

  /* The opcode string rides along as a SYMBOL_REF so the insn's
     output template can print the right compare mnemonic.  */
  pat = GEN_FCN (icode) (scratch, op0, op1,
			 gen_rtx (SYMBOL_REF, Pmode, opcode));
  if (! pat)
    return 0;
  emit_insn (pat);

  /* The vec_any* and vec_all* predicates use the same opcodes for two
     different operations, but the bits in CR6 will be different
     depending on what information we want.  So we have to play tricks
     with CR6 to get the right bits out.

     If you think this is disgusting, look at the specs for the
     AltiVec predicates.  */

  switch (cr6_form_int)
    {
    case 0:
      emit_insn (gen_cr6_test_for_zero (target));
      break;
    case 1:
      emit_insn (gen_cr6_test_for_zero_reverse (target));
      break;
    case 2:
      emit_insn (gen_cr6_test_for_lt (target));
      break;
    case 3:
      emit_insn (gen_cr6_test_for_lt_reverse (target));
      break;
    default:
      error ("argument 1 of __builtin_altivec_predicate is out of range");
      break;
    }

  return target;
}
3697
/* Expand an AltiVec vector-store builtin via insn pattern ICODE.
   Note the operand permutation: the builtin's first argument (ARG0)
   becomes insn operand 2, while the second and third arguments become
   insn operands 0 and 1.  Stores produce no value, so this always
   returns NULL_RTX.  */
static rtx
altivec_expand_stv_builtin (icode, arglist)
     enum insn_code icode;
     tree arglist;
{
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  rtx pat;
  enum machine_mode mode0 = insn_data[icode].operand[0].mode;
  enum machine_mode mode1 = insn_data[icode].operand[1].mode;
  enum machine_mode mode2 = insn_data[icode].operand[2].mode;

  /* Invalid arguments.  Bail before doing anything stoopid!  */
  if (arg0 == error_mark_node
      || arg1 == error_mark_node
      || arg2 == error_mark_node)
    return NULL_RTX;

  /* Legitimize each operand against the insn operand it will fill
     (note the 0<->2 swap relative to the argument order).  */
  if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
    op0 = copy_to_mode_reg (mode2, op0);
  if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
    op1 = copy_to_mode_reg (mode0, op1);
  if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
    op2 = copy_to_mode_reg (mode1, op2);

  pat = GEN_FCN (icode) (op1, op2, op0);
  if (pat)
    emit_insn (pat);
  return NULL_RTX;
}
3732
3733 static rtx
3734 altivec_expand_ternop_builtin (icode, arglist, target)
3735 enum insn_code icode;
3736 tree arglist;
3737 rtx target;
3738 {
3739 rtx pat;
3740 tree arg0 = TREE_VALUE (arglist);
3741 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3742 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3743 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3744 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3745 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3746 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3747 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3748 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3749 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3750
3751 /* If we got invalid arguments bail out before generating bad rtl. */
3752 if (arg0 == error_mark_node
3753 || arg1 == error_mark_node
3754 || arg2 == error_mark_node)
3755 return NULL_RTX;
3756
3757 if (target == 0
3758 || GET_MODE (target) != tmode
3759 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3760 target = gen_reg_rtx (tmode);
3761
3762 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3763 op0 = copy_to_mode_reg (mode0, op0);
3764 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3765 op1 = copy_to_mode_reg (mode1, op1);
3766 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3767 op2 = copy_to_mode_reg (mode2, op2);
3768
3769 pat = GEN_FCN (icode) (target, op0, op1, op2);
3770 if (! pat)
3771 return 0;
3772 emit_insn (pat);
3773
3774 return target;
3775 }
3776 static rtx
3777 altivec_expand_builtin (exp, target)
3778 tree exp;
3779 rtx target;
3780 {
3781 struct builtin_description *d;
3782 struct builtin_description_predicates *dp;
3783 size_t i;
3784 enum insn_code icode;
3785 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3786 tree arglist = TREE_OPERAND (exp, 1);
3787 tree arg0, arg1, arg2;
3788 rtx op0, op1, op2, pat;
3789 enum machine_mode tmode, mode0, mode1, mode2;
3790 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3791
3792 switch (fcode)
3793 {
3794 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3795 icode = CODE_FOR_altivec_lvx_16qi;
3796 arg0 = TREE_VALUE (arglist);
3797 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3798 tmode = insn_data[icode].operand[0].mode;
3799 mode0 = insn_data[icode].operand[1].mode;
3800
3801 if (target == 0
3802 || GET_MODE (target) != tmode
3803 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3804 target = gen_reg_rtx (tmode);
3805
3806 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3807 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3808
3809 pat = GEN_FCN (icode) (target, op0);
3810 if (! pat)
3811 return 0;
3812 emit_insn (pat);
3813 return target;
3814
3815 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3816 icode = CODE_FOR_altivec_lvx_8hi;
3817 arg0 = TREE_VALUE (arglist);
3818 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3819 tmode = insn_data[icode].operand[0].mode;
3820 mode0 = insn_data[icode].operand[1].mode;
3821
3822 if (target == 0
3823 || GET_MODE (target) != tmode
3824 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3825 target = gen_reg_rtx (tmode);
3826
3827 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3828 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3829
3830 pat = GEN_FCN (icode) (target, op0);
3831 if (! pat)
3832 return 0;
3833 emit_insn (pat);
3834 return target;
3835
3836 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3837 icode = CODE_FOR_altivec_lvx_4si;
3838 arg0 = TREE_VALUE (arglist);
3839 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3840 tmode = insn_data[icode].operand[0].mode;
3841 mode0 = insn_data[icode].operand[1].mode;
3842
3843 if (target == 0
3844 || GET_MODE (target) != tmode
3845 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3846 target = gen_reg_rtx (tmode);
3847
3848 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3849 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3850
3851 pat = GEN_FCN (icode) (target, op0);
3852 if (! pat)
3853 return 0;
3854 emit_insn (pat);
3855 return target;
3856
3857 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3858 icode = CODE_FOR_altivec_lvx_4sf;
3859 arg0 = TREE_VALUE (arglist);
3860 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3861 tmode = insn_data[icode].operand[0].mode;
3862 mode0 = insn_data[icode].operand[1].mode;
3863
3864 if (target == 0
3865 || GET_MODE (target) != tmode
3866 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3867 target = gen_reg_rtx (tmode);
3868
3869 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3870 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3871
3872 pat = GEN_FCN (icode) (target, op0);
3873 if (! pat)
3874 return 0;
3875 emit_insn (pat);
3876 return target;
3877
3878 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3879 icode = CODE_FOR_altivec_stvx_16qi;
3880 arg0 = TREE_VALUE (arglist);
3881 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3882 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3883 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3884 mode0 = insn_data[icode].operand[0].mode;
3885 mode1 = insn_data[icode].operand[1].mode;
3886
3887 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3888 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3889 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3890 op1 = copy_to_mode_reg (mode1, op1);
3891
3892 pat = GEN_FCN (icode) (op0, op1);
3893 if (pat)
3894 emit_insn (pat);
3895 return NULL_RTX;
3896
3897 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
3898 icode = CODE_FOR_altivec_stvx_8hi;
3899 arg0 = TREE_VALUE (arglist);
3900 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3901 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3902 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3903 mode0 = insn_data[icode].operand[0].mode;
3904 mode1 = insn_data[icode].operand[1].mode;
3905
3906 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3907 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3908 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3909 op1 = copy_to_mode_reg (mode1, op1);
3910
3911 pat = GEN_FCN (icode) (op0, op1);
3912 if (pat)
3913 emit_insn (pat);
3914 return NULL_RTX;
3915
3916 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
3917 icode = CODE_FOR_altivec_stvx_4si;
3918 arg0 = TREE_VALUE (arglist);
3919 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3920 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3921 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3922 mode0 = insn_data[icode].operand[0].mode;
3923 mode1 = insn_data[icode].operand[1].mode;
3924
3925 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3926 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3927 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3928 op1 = copy_to_mode_reg (mode1, op1);
3929
3930 pat = GEN_FCN (icode) (op0, op1);
3931 if (pat)
3932 emit_insn (pat);
3933 return NULL_RTX;
3934
3935 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
3936 icode = CODE_FOR_altivec_stvx_4sf;
3937 arg0 = TREE_VALUE (arglist);
3938 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3939 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3940 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3941 mode0 = insn_data[icode].operand[0].mode;
3942 mode1 = insn_data[icode].operand[1].mode;
3943
3944 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3945 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3946 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3947 op1 = copy_to_mode_reg (mode1, op1);
3948
3949 pat = GEN_FCN (icode) (op0, op1);
3950 if (pat)
3951 emit_insn (pat);
3952 return NULL_RTX;
3953
3954 case ALTIVEC_BUILTIN_STVX:
3955 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
3956 case ALTIVEC_BUILTIN_STVEBX:
3957 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
3958 case ALTIVEC_BUILTIN_STVEHX:
3959 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
3960 case ALTIVEC_BUILTIN_STVEWX:
3961 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
3962 case ALTIVEC_BUILTIN_STVXL:
3963 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
3964
3965 case ALTIVEC_BUILTIN_MFVSCR:
3966 icode = CODE_FOR_altivec_mfvscr;
3967 tmode = insn_data[icode].operand[0].mode;
3968
3969 if (target == 0
3970 || GET_MODE (target) != tmode
3971 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3972 target = gen_reg_rtx (tmode);
3973
3974 pat = GEN_FCN (icode) (target);
3975 if (! pat)
3976 return 0;
3977 emit_insn (pat);
3978 return target;
3979
3980 case ALTIVEC_BUILTIN_MTVSCR:
3981 icode = CODE_FOR_altivec_mtvscr;
3982 arg0 = TREE_VALUE (arglist);
3983 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3984 mode0 = insn_data[icode].operand[0].mode;
3985
3986 /* If we got invalid arguments bail out before generating bad rtl. */
3987 if (arg0 == error_mark_node)
3988 return NULL_RTX;
3989
3990 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3991 op0 = copy_to_mode_reg (mode0, op0);
3992
3993 pat = GEN_FCN (icode) (op0);
3994 if (pat)
3995 emit_insn (pat);
3996 return NULL_RTX;
3997
3998 case ALTIVEC_BUILTIN_DSSALL:
3999 emit_insn (gen_altivec_dssall ());
4000 return NULL_RTX;
4001
4002 case ALTIVEC_BUILTIN_DSS:
4003 icode = CODE_FOR_altivec_dss;
4004 arg0 = TREE_VALUE (arglist);
4005 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4006 mode0 = insn_data[icode].operand[0].mode;
4007
4008 /* If we got invalid arguments bail out before generating bad rtl. */
4009 if (arg0 == error_mark_node)
4010 return NULL_RTX;
4011
4012 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4013 op0 = copy_to_mode_reg (mode0, op0);
4014
4015 emit_insn (gen_altivec_dss (op0));
4016 return NULL_RTX;
4017 }
4018
4019 /* Handle DST variants. */
4020 d = (struct builtin_description *) bdesc_dst;
4021 for (i = 0; i < sizeof (bdesc_dst) / sizeof *d; i++, d++)
4022 if (d->code == fcode)
4023 {
4024 arg0 = TREE_VALUE (arglist);
4025 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4026 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4027 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4028 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4029 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4030 mode0 = insn_data[d->icode].operand[0].mode;
4031 mode1 = insn_data[d->icode].operand[1].mode;
4032 mode2 = insn_data[d->icode].operand[2].mode;
4033
4034 /* Invalid arguments, bail out before generating bad rtl. */
4035 if (arg0 == error_mark_node
4036 || arg1 == error_mark_node
4037 || arg2 == error_mark_node)
4038 return NULL_RTX;
4039
4040 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4041 op0 = copy_to_mode_reg (mode0, op0);
4042 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4043 op1 = copy_to_mode_reg (mode1, op1);
4044
4045 if (GET_CODE (op2) != CONST_INT || INTVAL (op2) > 3)
4046 {
4047 error ("argument 3 of `%s' must be a 2-bit literal", d->name);
4048 return NULL_RTX;
4049 }
4050
4051 pat = GEN_FCN (d->icode) (op0, op1, op2);
4052 if (pat != 0)
4053 emit_insn (pat);
4054
4055 return NULL_RTX;
4056 }
4057
4058 /* Expand abs* operations. */
4059 d = (struct builtin_description *) bdesc_abs;
4060 for (i = 0; i < sizeof (bdesc_abs) / sizeof *d; i++, d++)
4061 if (d->code == fcode)
4062 return altivec_expand_abs_builtin (d->icode, arglist, target);
4063
4064 /* Handle simple unary operations. */
4065 d = (struct builtin_description *) bdesc_1arg;
4066 for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
4067 if (d->code == fcode)
4068 return altivec_expand_unop_builtin (d->icode, arglist, target);
4069
4070 /* Handle simple binary operations. */
4071 d = (struct builtin_description *) bdesc_2arg;
4072 for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
4073 if (d->code == fcode)
4074 return altivec_expand_binop_builtin (d->icode, arglist, target);
4075
4076 /* Expand the AltiVec predicates. */
4077 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4078 for (i = 0; i < sizeof (bdesc_altivec_preds) / sizeof *dp; i++, dp++)
4079 if (dp->code == fcode)
4080 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4081
4082 /* LV* are funky. We initialized them differently. */
4083 switch (fcode)
4084 {
4085 case ALTIVEC_BUILTIN_LVSL:
4086 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsl,
4087 arglist, target);
4088 case ALTIVEC_BUILTIN_LVSR:
4089 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsr,
4090 arglist, target);
4091 case ALTIVEC_BUILTIN_LVEBX:
4092 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvebx,
4093 arglist, target);
4094 case ALTIVEC_BUILTIN_LVEHX:
4095 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvehx,
4096 arglist, target);
4097 case ALTIVEC_BUILTIN_LVEWX:
4098 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvewx,
4099 arglist, target);
4100 case ALTIVEC_BUILTIN_LVXL:
4101 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvxl,
4102 arglist, target);
4103 case ALTIVEC_BUILTIN_LVX:
4104 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvx,
4105 arglist, target);
4106 default:
4107 break;
4108 /* Fall through. */
4109 }
4110
4111 /* Handle simple ternary operations. */
4112 d = (struct builtin_description *) bdesc_3arg;
4113 for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++)
4114 if (d->code == fcode)
4115 return altivec_expand_ternop_builtin (d->icode, arglist, target);
4116
4117 abort ();
4118 return NULL_RTX;
4119 }
4120
4121 /* Expand an expression EXP that calls a built-in function,
4122 with result going to TARGET if that's convenient
4123 (and in mode MODE if that's convenient).
4124 SUBTARGET may be used as the target for computing one of EXP's operands.
4125 IGNORE is nonzero if the value is to be ignored. */
4126
4127 static rtx
4128 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
4129 tree exp;
4130 rtx target;
4131 rtx subtarget ATTRIBUTE_UNUSED;
4132 enum machine_mode mode ATTRIBUTE_UNUSED;
4133 int ignore ATTRIBUTE_UNUSED;
4134 {
4135 if (TARGET_ALTIVEC)
4136 return altivec_expand_builtin (exp, target);
4137
4138 abort ();
4139 }
4140
4141 static void
4142 rs6000_init_builtins ()
4143 {
4144 if (TARGET_ALTIVEC)
4145 altivec_init_builtins ();
4146 }
4147
4148 static void
4149 altivec_init_builtins (void)
4150 {
4151 struct builtin_description *d;
4152 struct builtin_description_predicates *dp;
4153 size_t i;
4154
4155 tree endlink = void_list_node;
4156
4157 tree pint_type_node = build_pointer_type (integer_type_node);
4158 tree pvoid_type_node = build_pointer_type (void_type_node);
4159 tree pshort_type_node = build_pointer_type (short_integer_type_node);
4160 tree pchar_type_node = build_pointer_type (char_type_node);
4161 tree pfloat_type_node = build_pointer_type (float_type_node);
4162
4163 tree v4sf_ftype_v4sf_v4sf_v16qi
4164 = build_function_type (V4SF_type_node,
4165 tree_cons (NULL_TREE, V4SF_type_node,
4166 tree_cons (NULL_TREE, V4SF_type_node,
4167 tree_cons (NULL_TREE,
4168 V16QI_type_node,
4169 endlink))));
4170 tree v4si_ftype_v4si_v4si_v16qi
4171 = build_function_type (V4SI_type_node,
4172 tree_cons (NULL_TREE, V4SI_type_node,
4173 tree_cons (NULL_TREE, V4SI_type_node,
4174 tree_cons (NULL_TREE,
4175 V16QI_type_node,
4176 endlink))));
4177 tree v8hi_ftype_v8hi_v8hi_v16qi
4178 = build_function_type (V8HI_type_node,
4179 tree_cons (NULL_TREE, V8HI_type_node,
4180 tree_cons (NULL_TREE, V8HI_type_node,
4181 tree_cons (NULL_TREE,
4182 V16QI_type_node,
4183 endlink))));
4184 tree v16qi_ftype_v16qi_v16qi_v16qi
4185 = build_function_type (V16QI_type_node,
4186 tree_cons (NULL_TREE, V16QI_type_node,
4187 tree_cons (NULL_TREE, V16QI_type_node,
4188 tree_cons (NULL_TREE,
4189 V16QI_type_node,
4190 endlink))));
4191
4192 /* V4SI foo (char). */
4193 tree v4si_ftype_char
4194 = build_function_type (V4SI_type_node,
4195 tree_cons (NULL_TREE, char_type_node, endlink));
4196
4197 /* V8HI foo (char). */
4198 tree v8hi_ftype_char
4199 = build_function_type (V8HI_type_node,
4200 tree_cons (NULL_TREE, char_type_node, endlink));
4201
4202 /* V16QI foo (char). */
4203 tree v16qi_ftype_char
4204 = build_function_type (V16QI_type_node,
4205 tree_cons (NULL_TREE, char_type_node, endlink));
4206 /* V4SF foo (V4SF). */
4207 tree v4sf_ftype_v4sf
4208 = build_function_type (V4SF_type_node,
4209 tree_cons (NULL_TREE, V4SF_type_node, endlink));
4210
4211 /* V4SI foo (int *). */
4212 tree v4si_ftype_pint
4213 = build_function_type (V4SI_type_node,
4214 tree_cons (NULL_TREE, pint_type_node, endlink));
4215 /* V8HI foo (short *). */
4216 tree v8hi_ftype_pshort
4217 = build_function_type (V8HI_type_node,
4218 tree_cons (NULL_TREE, pshort_type_node, endlink));
4219 /* V16QI foo (char *). */
4220 tree v16qi_ftype_pchar
4221 = build_function_type (V16QI_type_node,
4222 tree_cons (NULL_TREE, pchar_type_node, endlink));
4223 /* V4SF foo (float *). */
4224 tree v4sf_ftype_pfloat
4225 = build_function_type (V4SF_type_node,
4226 tree_cons (NULL_TREE, pfloat_type_node, endlink));
4227
4228 /* V8HI foo (V16QI). */
4229 tree v8hi_ftype_v16qi
4230 = build_function_type (V8HI_type_node,
4231 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4232
4233 /* void foo (void *, int, char/literal). */
4234 tree void_ftype_pvoid_int_char
4235 = build_function_type (void_type_node,
4236 tree_cons (NULL_TREE, pvoid_type_node,
4237 tree_cons (NULL_TREE, integer_type_node,
4238 tree_cons (NULL_TREE,
4239 char_type_node,
4240 endlink))));
4241
4242 /* void foo (int *, V4SI). */
4243 tree void_ftype_pint_v4si
4244 = build_function_type (void_type_node,
4245 tree_cons (NULL_TREE, pint_type_node,
4246 tree_cons (NULL_TREE, V4SI_type_node,
4247 endlink)));
4248 /* void foo (short *, V8HI). */
4249 tree void_ftype_pshort_v8hi
4250 = build_function_type (void_type_node,
4251 tree_cons (NULL_TREE, pshort_type_node,
4252 tree_cons (NULL_TREE, V8HI_type_node,
4253 endlink)));
4254 /* void foo (char *, V16QI). */
4255 tree void_ftype_pchar_v16qi
4256 = build_function_type (void_type_node,
4257 tree_cons (NULL_TREE, pchar_type_node,
4258 tree_cons (NULL_TREE, V16QI_type_node,
4259 endlink)));
4260 /* void foo (float *, V4SF). */
4261 tree void_ftype_pfloat_v4sf
4262 = build_function_type (void_type_node,
4263 tree_cons (NULL_TREE, pfloat_type_node,
4264 tree_cons (NULL_TREE, V4SF_type_node,
4265 endlink)));
4266
4267 /* void foo (V4SI). */
4268 tree void_ftype_v4si
4269 = build_function_type (void_type_node,
4270 tree_cons (NULL_TREE, V4SI_type_node,
4271 endlink));
4272
4273 /* void foo (vint, int, void *). */
4274 tree void_ftype_v4si_int_pvoid
4275 = build_function_type (void_type_node,
4276 tree_cons (NULL_TREE, V4SI_type_node,
4277 tree_cons (NULL_TREE, integer_type_node,
4278 tree_cons (NULL_TREE,
4279 pvoid_type_node,
4280 endlink))));
4281
4282 /* void foo (vchar, int, void *). */
4283 tree void_ftype_v16qi_int_pvoid
4284 = build_function_type (void_type_node,
4285 tree_cons (NULL_TREE, V16QI_type_node,
4286 tree_cons (NULL_TREE, integer_type_node,
4287 tree_cons (NULL_TREE,
4288 pvoid_type_node,
4289 endlink))));
4290
4291 /* void foo (vshort, int, void *). */
4292 tree void_ftype_v8hi_int_pvoid
4293 = build_function_type (void_type_node,
4294 tree_cons (NULL_TREE, V8HI_type_node,
4295 tree_cons (NULL_TREE, integer_type_node,
4296 tree_cons (NULL_TREE,
4297 pvoid_type_node,
4298 endlink))));
4299
4300 /* void foo (char). */
4301 tree void_ftype_qi
4302 = build_function_type (void_type_node,
4303 tree_cons (NULL_TREE, char_type_node,
4304 endlink));
4305
4306 /* void foo (void). */
4307 tree void_ftype_void
4308 = build_function_type (void_type_node, void_list_node);
4309
4310 /* vshort foo (void). */
4311 tree v8hi_ftype_void
4312 = build_function_type (V8HI_type_node, void_list_node);
4313
4314 tree v4si_ftype_v4si_v4si
4315 = build_function_type (V4SI_type_node,
4316 tree_cons (NULL_TREE, V4SI_type_node,
4317 tree_cons (NULL_TREE, V4SI_type_node,
4318 endlink)));
4319
4320 /* These are for the unsigned 5 bit literals. */
4321
4322 tree v4sf_ftype_v4si_char
4323 = build_function_type (V4SF_type_node,
4324 tree_cons (NULL_TREE, V4SI_type_node,
4325 tree_cons (NULL_TREE, char_type_node,
4326 endlink)));
4327 tree v4si_ftype_v4sf_char
4328 = build_function_type (V4SI_type_node,
4329 tree_cons (NULL_TREE, V4SF_type_node,
4330 tree_cons (NULL_TREE, char_type_node,
4331 endlink)));
4332 tree v4si_ftype_v4si_char
4333 = build_function_type (V4SI_type_node,
4334 tree_cons (NULL_TREE, V4SI_type_node,
4335 tree_cons (NULL_TREE, char_type_node,
4336 endlink)));
4337 tree v8hi_ftype_v8hi_char
4338 = build_function_type (V8HI_type_node,
4339 tree_cons (NULL_TREE, V8HI_type_node,
4340 tree_cons (NULL_TREE, char_type_node,
4341 endlink)));
4342 tree v16qi_ftype_v16qi_char
4343 = build_function_type (V16QI_type_node,
4344 tree_cons (NULL_TREE, V16QI_type_node,
4345 tree_cons (NULL_TREE, char_type_node,
4346 endlink)));
4347
4348 /* These are for the unsigned 4 bit literals. */
4349
4350 tree v16qi_ftype_v16qi_v16qi_char
4351 = build_function_type (V16QI_type_node,
4352 tree_cons (NULL_TREE, V16QI_type_node,
4353 tree_cons (NULL_TREE, V16QI_type_node,
4354 tree_cons (NULL_TREE,
4355 char_type_node,
4356 endlink))));
4357
4358 tree v8hi_ftype_v8hi_v8hi_char
4359 = build_function_type (V8HI_type_node,
4360 tree_cons (NULL_TREE, V8HI_type_node,
4361 tree_cons (NULL_TREE, V8HI_type_node,
4362 tree_cons (NULL_TREE,
4363 char_type_node,
4364 endlink))));
4365
4366 tree v4si_ftype_v4si_v4si_char
4367 = build_function_type (V4SI_type_node,
4368 tree_cons (NULL_TREE, V4SI_type_node,
4369 tree_cons (NULL_TREE, V4SI_type_node,
4370 tree_cons (NULL_TREE,
4371 char_type_node,
4372 endlink))));
4373
4374 tree v4sf_ftype_v4sf_v4sf_char
4375 = build_function_type (V4SF_type_node,
4376 tree_cons (NULL_TREE, V4SF_type_node,
4377 tree_cons (NULL_TREE, V4SF_type_node,
4378 tree_cons (NULL_TREE,
4379 char_type_node,
4380 endlink))));
4381
4382 /* End of 4 bit literals. */
4383
4384 tree v4sf_ftype_v4sf_v4sf
4385 = build_function_type (V4SF_type_node,
4386 tree_cons (NULL_TREE, V4SF_type_node,
4387 tree_cons (NULL_TREE, V4SF_type_node,
4388 endlink)));
4389 tree v4sf_ftype_v4sf_v4sf_v4si
4390 = build_function_type (V4SF_type_node,
4391 tree_cons (NULL_TREE, V4SF_type_node,
4392 tree_cons (NULL_TREE, V4SF_type_node,
4393 tree_cons (NULL_TREE,
4394 V4SI_type_node,
4395 endlink))));
4396 tree v4sf_ftype_v4sf_v4sf_v4sf
4397 = build_function_type (V4SF_type_node,
4398 tree_cons (NULL_TREE, V4SF_type_node,
4399 tree_cons (NULL_TREE, V4SF_type_node,
4400 tree_cons (NULL_TREE,
4401 V4SF_type_node,
4402 endlink))));
4403 tree v4si_ftype_v4si_v4si_v4si
4404 = build_function_type (V4SI_type_node,
4405 tree_cons (NULL_TREE, V4SI_type_node,
4406 tree_cons (NULL_TREE, V4SI_type_node,
4407 tree_cons (NULL_TREE,
4408 V4SI_type_node,
4409 endlink))));
4410
4411 tree v8hi_ftype_v8hi_v8hi
4412 = build_function_type (V8HI_type_node,
4413 tree_cons (NULL_TREE, V8HI_type_node,
4414 tree_cons (NULL_TREE, V8HI_type_node,
4415 endlink)));
4416 tree v8hi_ftype_v8hi_v8hi_v8hi
4417 = build_function_type (V8HI_type_node,
4418 tree_cons (NULL_TREE, V8HI_type_node,
4419 tree_cons (NULL_TREE, V8HI_type_node,
4420 tree_cons (NULL_TREE,
4421 V8HI_type_node,
4422 endlink))));
4423 tree v4si_ftype_v8hi_v8hi_v4si
4424 = build_function_type (V4SI_type_node,
4425 tree_cons (NULL_TREE, V8HI_type_node,
4426 tree_cons (NULL_TREE, V8HI_type_node,
4427 tree_cons (NULL_TREE,
4428 V4SI_type_node,
4429 endlink))));
4430 tree v4si_ftype_v16qi_v16qi_v4si
4431 = build_function_type (V4SI_type_node,
4432 tree_cons (NULL_TREE, V16QI_type_node,
4433 tree_cons (NULL_TREE, V16QI_type_node,
4434 tree_cons (NULL_TREE,
4435 V4SI_type_node,
4436 endlink))));
4437
4438 tree v16qi_ftype_v16qi_v16qi
4439 = build_function_type (V16QI_type_node,
4440 tree_cons (NULL_TREE, V16QI_type_node,
4441 tree_cons (NULL_TREE, V16QI_type_node,
4442 endlink)));
4443
4444 tree v4si_ftype_v4sf_v4sf
4445 = build_function_type (V4SI_type_node,
4446 tree_cons (NULL_TREE, V4SF_type_node,
4447 tree_cons (NULL_TREE, V4SF_type_node,
4448 endlink)));
4449
4450 tree v4si_ftype_v4si
4451 = build_function_type (V4SI_type_node,
4452 tree_cons (NULL_TREE, V4SI_type_node, endlink));
4453
4454 tree v8hi_ftype_v8hi
4455 = build_function_type (V8HI_type_node,
4456 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4457
4458 tree v16qi_ftype_v16qi
4459 = build_function_type (V16QI_type_node,
4460 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4461
4462 tree v8hi_ftype_v16qi_v16qi
4463 = build_function_type (V8HI_type_node,
4464 tree_cons (NULL_TREE, V16QI_type_node,
4465 tree_cons (NULL_TREE, V16QI_type_node,
4466 endlink)));
4467
4468 tree v4si_ftype_v8hi_v8hi
4469 = build_function_type (V4SI_type_node,
4470 tree_cons (NULL_TREE, V8HI_type_node,
4471 tree_cons (NULL_TREE, V8HI_type_node,
4472 endlink)));
4473
4474 tree v8hi_ftype_v4si_v4si
4475 = build_function_type (V8HI_type_node,
4476 tree_cons (NULL_TREE, V4SI_type_node,
4477 tree_cons (NULL_TREE, V4SI_type_node,
4478 endlink)));
4479
4480 tree v16qi_ftype_v8hi_v8hi
4481 = build_function_type (V16QI_type_node,
4482 tree_cons (NULL_TREE, V8HI_type_node,
4483 tree_cons (NULL_TREE, V8HI_type_node,
4484 endlink)));
4485
4486 tree v4si_ftype_v16qi_v4si
4487 = build_function_type (V4SI_type_node,
4488 tree_cons (NULL_TREE, V16QI_type_node,
4489 tree_cons (NULL_TREE, V4SI_type_node,
4490 endlink)));
4491
4492 tree v4si_ftype_v16qi_v16qi
4493 = build_function_type (V4SI_type_node,
4494 tree_cons (NULL_TREE, V16QI_type_node,
4495 tree_cons (NULL_TREE, V16QI_type_node,
4496 endlink)));
4497
4498 tree v4si_ftype_v8hi_v4si
4499 = build_function_type (V4SI_type_node,
4500 tree_cons (NULL_TREE, V8HI_type_node,
4501 tree_cons (NULL_TREE, V4SI_type_node,
4502 endlink)));
4503
4504 tree v4si_ftype_v8hi
4505 = build_function_type (V4SI_type_node,
4506 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4507
4508 tree int_ftype_v4si_v4si
4509 = build_function_type (integer_type_node,
4510 tree_cons (NULL_TREE, V4SI_type_node,
4511 tree_cons (NULL_TREE, V4SI_type_node,
4512 endlink)));
4513
4514 tree int_ftype_v4sf_v4sf
4515 = build_function_type (integer_type_node,
4516 tree_cons (NULL_TREE, V4SF_type_node,
4517 tree_cons (NULL_TREE, V4SF_type_node,
4518 endlink)));
4519
4520 tree int_ftype_v16qi_v16qi
4521 = build_function_type (integer_type_node,
4522 tree_cons (NULL_TREE, V16QI_type_node,
4523 tree_cons (NULL_TREE, V16QI_type_node,
4524 endlink)));
4525
4526 tree int_ftype_int_v4si_v4si
4527 = build_function_type
4528 (integer_type_node,
4529 tree_cons (NULL_TREE, integer_type_node,
4530 tree_cons (NULL_TREE, V4SI_type_node,
4531 tree_cons (NULL_TREE, V4SI_type_node,
4532 endlink))));
4533
4534 tree int_ftype_int_v4sf_v4sf
4535 = build_function_type
4536 (integer_type_node,
4537 tree_cons (NULL_TREE, integer_type_node,
4538 tree_cons (NULL_TREE, V4SF_type_node,
4539 tree_cons (NULL_TREE, V4SF_type_node,
4540 endlink))));
4541
4542 tree int_ftype_int_v8hi_v8hi
4543 = build_function_type
4544 (integer_type_node,
4545 tree_cons (NULL_TREE, integer_type_node,
4546 tree_cons (NULL_TREE, V8HI_type_node,
4547 tree_cons (NULL_TREE, V8HI_type_node,
4548 endlink))));
4549
4550 tree int_ftype_int_v16qi_v16qi
4551 = build_function_type
4552 (integer_type_node,
4553 tree_cons (NULL_TREE, integer_type_node,
4554 tree_cons (NULL_TREE, V16QI_type_node,
4555 tree_cons (NULL_TREE, V16QI_type_node,
4556 endlink))));
4557
4558 tree v16qi_ftype_int_pvoid
4559 = build_function_type (V16QI_type_node,
4560 tree_cons (NULL_TREE, integer_type_node,
4561 tree_cons (NULL_TREE, pvoid_type_node,
4562 endlink)));
4563
4564 tree v4si_ftype_int_pvoid
4565 = build_function_type (V4SI_type_node,
4566 tree_cons (NULL_TREE, integer_type_node,
4567 tree_cons (NULL_TREE, pvoid_type_node,
4568 endlink)));
4569
4570 tree v8hi_ftype_int_pvoid
4571 = build_function_type (V8HI_type_node,
4572 tree_cons (NULL_TREE, integer_type_node,
4573 tree_cons (NULL_TREE, pvoid_type_node,
4574 endlink)));
4575
4576 tree int_ftype_v8hi_v8hi
4577 = build_function_type (integer_type_node,
4578 tree_cons (NULL_TREE, V8HI_type_node,
4579 tree_cons (NULL_TREE, V8HI_type_node,
4580 endlink)));
4581
4582 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
4583 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
4584 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
4585 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
4586 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
4587 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
4588 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
4589 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
4590 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
4591 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
4592 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
4593 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
4594 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
4595 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
4596 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
4597 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
4598 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
4599 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
4600 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
4601 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
4602 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
4603 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
4604 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
4605 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
4606
4607 /* Add the simple ternary operators. */
4608 d = (struct builtin_description *) bdesc_3arg;
4609 for (i = 0; i < sizeof (bdesc_3arg) / sizeof *d; i++, d++)
4610 {
4611
4612 enum machine_mode mode0, mode1, mode2, mode3;
4613 tree type;
4614
4615 if (d->name == 0)
4616 continue;
4617
4618 mode0 = insn_data[d->icode].operand[0].mode;
4619 mode1 = insn_data[d->icode].operand[1].mode;
4620 mode2 = insn_data[d->icode].operand[2].mode;
4621 mode3 = insn_data[d->icode].operand[3].mode;
4622
4623 /* When all four are of the same mode. */
4624 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
4625 {
4626 switch (mode0)
4627 {
4628 case V4SImode:
4629 type = v4si_ftype_v4si_v4si_v4si;
4630 break;
4631 case V4SFmode:
4632 type = v4sf_ftype_v4sf_v4sf_v4sf;
4633 break;
4634 case V8HImode:
4635 type = v8hi_ftype_v8hi_v8hi_v8hi;
4636 break;
4637 case V16QImode:
4638 type = v16qi_ftype_v16qi_v16qi_v16qi;
4639 break;
4640 default:
4641 abort();
4642 }
4643 }
4644 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
4645 {
4646 switch (mode0)
4647 {
4648 case V4SImode:
4649 type = v4si_ftype_v4si_v4si_v16qi;
4650 break;
4651 case V4SFmode:
4652 type = v4sf_ftype_v4sf_v4sf_v16qi;
4653 break;
4654 case V8HImode:
4655 type = v8hi_ftype_v8hi_v8hi_v16qi;
4656 break;
4657 case V16QImode:
4658 type = v16qi_ftype_v16qi_v16qi_v16qi;
4659 break;
4660 default:
4661 abort();
4662 }
4663 }
4664 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
4665 && mode3 == V4SImode)
4666 type = v4si_ftype_v16qi_v16qi_v4si;
4667 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
4668 && mode3 == V4SImode)
4669 type = v4si_ftype_v8hi_v8hi_v4si;
4670 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
4671 && mode3 == V4SImode)
4672 type = v4sf_ftype_v4sf_v4sf_v4si;
4673
4674 /* vchar, vchar, vchar, 4 bit literal. */
4675 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
4676 && mode3 == QImode)
4677 type = v16qi_ftype_v16qi_v16qi_char;
4678
4679 /* vshort, vshort, vshort, 4 bit literal. */
4680 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
4681 && mode3 == QImode)
4682 type = v8hi_ftype_v8hi_v8hi_char;
4683
4684 /* vint, vint, vint, 4 bit literal. */
4685 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
4686 && mode3 == QImode)
4687 type = v4si_ftype_v4si_v4si_char;
4688
4689 /* vfloat, vfloat, vfloat, 4 bit literal. */
4690 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
4691 && mode3 == QImode)
4692 type = v4sf_ftype_v4sf_v4sf_char;
4693
4694 else
4695 abort ();
4696
4697 def_builtin (d->mask, d->name, type, d->code);
4698 }
4699
4700 /* Add the DST variants. */
4701 d = (struct builtin_description *) bdesc_dst;
4702 for (i = 0; i < sizeof (bdesc_dst) / sizeof *d; i++, d++)
4703 def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
4704
4705 /* Initialize the predicates. */
4706 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4707 for (i = 0; i < sizeof (bdesc_altivec_preds) / sizeof *dp; i++, dp++)
4708 {
4709 enum machine_mode mode1;
4710 tree type;
4711
4712 mode1 = insn_data[dp->icode].operand[1].mode;
4713
4714 switch (mode1)
4715 {
4716 case V4SImode:
4717 type = int_ftype_int_v4si_v4si;
4718 break;
4719 case V8HImode:
4720 type = int_ftype_int_v8hi_v8hi;
4721 break;
4722 case V16QImode:
4723 type = int_ftype_int_v16qi_v16qi;
4724 break;
4725 case V4SFmode:
4726 type = int_ftype_int_v4sf_v4sf;
4727 break;
4728 default:
4729 abort ();
4730 }
4731
4732 def_builtin (dp->mask, dp->name, type, dp->code);
4733 }
4734
4735 /* Add the simple binary operators. */
4736 d = (struct builtin_description *) bdesc_2arg;
4737 for (i = 0; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
4738 {
4739 enum machine_mode mode0, mode1, mode2;
4740 tree type;
4741
4742 if (d->name == 0)
4743 continue;
4744
4745 mode0 = insn_data[d->icode].operand[0].mode;
4746 mode1 = insn_data[d->icode].operand[1].mode;
4747 mode2 = insn_data[d->icode].operand[2].mode;
4748
4749 /* When all three operands are of the same mode. */
4750 if (mode0 == mode1 && mode1 == mode2)
4751 {
4752 switch (mode0)
4753 {
4754 case V4SFmode:
4755 type = v4sf_ftype_v4sf_v4sf;
4756 break;
4757 case V4SImode:
4758 type = v4si_ftype_v4si_v4si;
4759 break;
4760 case V16QImode:
4761 type = v16qi_ftype_v16qi_v16qi;
4762 break;
4763 case V8HImode:
4764 type = v8hi_ftype_v8hi_v8hi;
4765 break;
4766 default:
4767 abort ();
4768 }
4769 }
4770
4771 /* A few other combos we really don't want to do manually. */
4772
4773 /* vint, vfloat, vfloat. */
4774 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
4775 type = v4si_ftype_v4sf_v4sf;
4776
4777 /* vshort, vchar, vchar. */
4778 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
4779 type = v8hi_ftype_v16qi_v16qi;
4780
4781 /* vint, vshort, vshort. */
4782 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
4783 type = v4si_ftype_v8hi_v8hi;
4784
4785 /* vshort, vint, vint. */
4786 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
4787 type = v8hi_ftype_v4si_v4si;
4788
4789 /* vchar, vshort, vshort. */
4790 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
4791 type = v16qi_ftype_v8hi_v8hi;
4792
4793 /* vint, vchar, vint. */
4794 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
4795 type = v4si_ftype_v16qi_v4si;
4796
4797 /* vint, vchar, vchar. */
4798 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
4799 type = v4si_ftype_v16qi_v16qi;
4800
4801 /* vint, vshort, vint. */
4802 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
4803 type = v4si_ftype_v8hi_v4si;
4804
4805 /* vint, vint, 5 bit literal. */
4806 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
4807 type = v4si_ftype_v4si_char;
4808
4809 /* vshort, vshort, 5 bit literal. */
4810 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
4811 type = v8hi_ftype_v8hi_char;
4812
4813 /* vchar, vchar, 5 bit literal. */
4814 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
4815 type = v16qi_ftype_v16qi_char;
4816
4817 /* vfloat, vint, 5 bit literal. */
4818 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
4819 type = v4sf_ftype_v4si_char;
4820
4821 /* vint, vfloat, 5 bit literal. */
4822 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
4823 type = v4si_ftype_v4sf_char;
4824
4825 /* int, x, x. */
4826 else if (mode0 == SImode)
4827 {
4828 switch (mode1)
4829 {
4830 case V4SImode:
4831 type = int_ftype_v4si_v4si;
4832 break;
4833 case V4SFmode:
4834 type = int_ftype_v4sf_v4sf;
4835 break;
4836 case V16QImode:
4837 type = int_ftype_v16qi_v16qi;
4838 break;
4839 case V8HImode:
4840 type = int_ftype_v8hi_v8hi;
4841 break;
4842 default:
4843 abort ();
4844 }
4845 }
4846
4847 else
4848 abort ();
4849
4850 def_builtin (d->mask, d->name, type, d->code);
4851 }
4852
4853 /* Initialize the abs* operators. */
4854 d = (struct builtin_description *) bdesc_abs;
4855 for (i = 0; i < sizeof (bdesc_abs) / sizeof *d; i++, d++)
4856 {
4857 enum machine_mode mode0;
4858 tree type;
4859
4860 mode0 = insn_data[d->icode].operand[0].mode;
4861
4862 switch (mode0)
4863 {
4864 case V4SImode:
4865 type = v4si_ftype_v4si;
4866 break;
4867 case V8HImode:
4868 type = v8hi_ftype_v8hi;
4869 break;
4870 case V16QImode:
4871 type = v16qi_ftype_v16qi;
4872 break;
4873 case V4SFmode:
4874 type = v4sf_ftype_v4sf;
4875 break;
4876 default:
4877 abort ();
4878 }
4879
4880 def_builtin (d->mask, d->name, type, d->code);
4881 }
4882
4883 /* Add the simple unary operators. */
4884 d = (struct builtin_description *) bdesc_1arg;
4885 for (i = 0; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
4886 {
4887 enum machine_mode mode0, mode1;
4888 tree type;
4889
4890 if (d->name == 0)
4891 continue;
4892
4893 mode0 = insn_data[d->icode].operand[0].mode;
4894 mode1 = insn_data[d->icode].operand[1].mode;
4895
4896 if (mode0 == V4SImode && mode1 == QImode)
4897 type = v4si_ftype_char;
4898 else if (mode0 == V8HImode && mode1 == QImode)
4899 type = v8hi_ftype_char;
4900 else if (mode0 == V16QImode && mode1 == QImode)
4901 type = v16qi_ftype_char;
4902 else if (mode0 == V4SFmode && mode1 == V4SFmode)
4903 type = v4sf_ftype_v4sf;
4904 else if (mode0 == V8HImode && mode1 == V16QImode)
4905 type = v8hi_ftype_v16qi;
4906 else if (mode0 == V4SImode && mode1 == V8HImode)
4907 type = v4si_ftype_v8hi;
4908 else
4909 abort ();
4910
4911 def_builtin (d->mask, d->name, type, d->code);
4912 }
4913 }
4914
4915 \f
4916 /* Generate a memory reference for expand_block_move, copying volatile,
4917 and other bits from an original memory reference. */
4918
4919 static rtx
4920 expand_block_move_mem (mode, addr, orig_mem)
4921 enum machine_mode mode;
4922 rtx addr;
4923 rtx orig_mem;
4924 {
4925 rtx mem = gen_rtx_MEM (mode, addr);
4926
4927 MEM_COPY_ATTRIBUTES (mem, orig_mem);
4928 return mem;
4929 }
4930
4931 /* Expand a block move operation, and return 1 if successful. Return 0
4932 if we should let the compiler generate normal code.
4933
4934 operands[0] is the destination
4935 operands[1] is the source
4936 operands[2] is the length
4937 operands[3] is the alignment */
4938
4939 #define MAX_MOVE_REG 4
4940
int
expand_block_move (operands)
     rtx operands[];
{
  rtx orig_dest = operands[0];
  rtx orig_src = operands[1];
  rtx bytes_rtx = operands[2];
  rtx align_rtx = operands[3];
  int constp = (GET_CODE (bytes_rtx) == CONST_INT);
  int align;
  int bytes;
  int offset;
  int num_reg;
  int i;
  rtx src_reg;
  rtx dest_reg;
  rtx src_addr;
  rtx dest_addr;
  rtx tmp_reg;
  /* Pending store insns, batched so that all the loads of a group are
     emitted before their stores (see the non-TARGET_STRING path).  */
  rtx stores[MAX_MOVE_REG];
  int move_bytes;

  /* If this is not a fixed size move, just call memcpy.
     Returning 0 tells the caller to fall back to the library call.  */
  if (! constp)
    return 0;

  /* If this is not a fixed size alignment, abort */
  if (GET_CODE (align_rtx) != CONST_INT)
    abort ();
  align = INTVAL (align_rtx);

  /* Anything to move? */
  bytes = INTVAL (bytes_rtx);
  if (bytes <= 0)
    return 1;

  /* store_one_arg depends on expand_block_move to handle at least the size of
     reg_parm_stack_space.  */
  if (bytes > (TARGET_POWERPC64 ? 64 : 32))
    return 0;

  /* Move the address into scratch registers.  */
  dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
  src_reg = copy_addr_to_reg (XEXP (orig_src, 0));

  if (TARGET_STRING)	/* string instructions are available */
    {
      for ( ; bytes > 0; bytes -= move_bytes)
	{
	  /* Pick the widest string-move pattern whose scratch registers
	     (checked one by one via fixed_regs) are all available.  */
	  if (bytes > 24		/* move up to 32 bytes at a time */
	      && ! fixed_regs[5]
	      && ! fixed_regs[6]
	      && ! fixed_regs[7]
	      && ! fixed_regs[8]
	      && ! fixed_regs[9]
	      && ! fixed_regs[10]
	      && ! fixed_regs[11]
	      && ! fixed_regs[12])
	    {
	      move_bytes = (bytes > 32) ? 32 : bytes;
	      /* NOTE(review): a length of 32 is encoded as 0 here --
		 presumably matching the lswi/stswi NB-field encoding in
		 the movstrsi_8reg pattern; confirm against rs6000.md.  */
	      emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
								   dest_reg,
								   orig_dest),
					    expand_block_move_mem (BLKmode,
								   src_reg,
								   orig_src),
					    GEN_INT ((move_bytes == 32)
						     ? 0 : move_bytes),
					    align_rtx));
	    }
	  else if (bytes > 16	/* move up to 24 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8]
		   && ! fixed_regs[9]
		   && ! fixed_regs[10])
	    {
	      move_bytes = (bytes > 24) ? 24 : bytes;
	      emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
								   dest_reg,
								   orig_dest),
					    expand_block_move_mem (BLKmode,
								   src_reg,
								   orig_src),
					    GEN_INT (move_bytes),
					    align_rtx));
	    }
	  else if (bytes > 8	/* move up to 16 bytes at a time */
		   && ! fixed_regs[5]
		   && ! fixed_regs[6]
		   && ! fixed_regs[7]
		   && ! fixed_regs[8])
	    {
	      move_bytes = (bytes > 16) ? 16 : bytes;
	      emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
								   dest_reg,
								   orig_dest),
					    expand_block_move_mem (BLKmode,
								   src_reg,
								   orig_src),
					    GEN_INT (move_bytes),
					    align_rtx));
	    }
	  else if (bytes >= 8 && TARGET_POWERPC64
		   /* 64-bit loads and stores require word-aligned
		      displacements.  */
		   && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      /* A single doubleword load/store through a scratch reg.  */
	      move_bytes = 8;
	      tmp_reg = gen_reg_rtx (DImode);
	      emit_move_insn (tmp_reg,
			      expand_block_move_mem (DImode,
						     src_reg, orig_src));
	      emit_move_insn (expand_block_move_mem (DImode,
						     dest_reg, orig_dest),
			      tmp_reg);
	    }
	  else if (bytes > 4 && !TARGET_POWERPC64)
	    {			/* move up to 8 bytes at a time */
	      move_bytes = (bytes > 8) ? 8 : bytes;
	      emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
								   dest_reg,
								   orig_dest),
					    expand_block_move_mem (BLKmode,
								   src_reg,
								   orig_src),
					    GEN_INT (move_bytes),
					    align_rtx));
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {			/* move 4 bytes */
	      move_bytes = 4;
	      tmp_reg = gen_reg_rtx (SImode);
	      emit_move_insn (tmp_reg,
			      expand_block_move_mem (SImode,
						     src_reg, orig_src));
	      emit_move_insn (expand_block_move_mem (SImode,
						     dest_reg, orig_dest),
			      tmp_reg);
	    }
	  else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {			/* move 2 bytes */
	      move_bytes = 2;
	      tmp_reg = gen_reg_rtx (HImode);
	      emit_move_insn (tmp_reg,
			      expand_block_move_mem (HImode,
						     src_reg, orig_src));
	      emit_move_insn (expand_block_move_mem (HImode,
						     dest_reg, orig_dest),
			      tmp_reg);
	    }
	  else if (bytes == 1)	/* move 1 byte */
	    {
	      move_bytes = 1;
	      tmp_reg = gen_reg_rtx (QImode);
	      emit_move_insn (tmp_reg,
			      expand_block_move_mem (QImode,
						     src_reg, orig_src));
	      emit_move_insn (expand_block_move_mem (QImode,
						     dest_reg, orig_dest),
			      tmp_reg);
	    }
	  else
	    {			/* move up to 4 bytes at a time */
	      move_bytes = (bytes > 4) ? 4 : bytes;
	      emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
								   dest_reg,
								   orig_dest),
					    expand_block_move_mem (BLKmode,
								   src_reg,
								   orig_src),
					    GEN_INT (move_bytes),
					    align_rtx));
	    }

	  /* The string patterns use the base registers directly, so if
	     more data remains, bump both pointers past what was moved.  */
	  if (bytes > move_bytes)
	    {
	      if (! TARGET_POWERPC64)
		{
		  emit_insn (gen_addsi3 (src_reg, src_reg,
					 GEN_INT (move_bytes)));
		  emit_insn (gen_addsi3 (dest_reg, dest_reg,
					 GEN_INT (move_bytes)));
		}
	      else
		{
		  emit_insn (gen_adddi3 (src_reg, src_reg,
					 GEN_INT (move_bytes)));
		  emit_insn (gen_adddi3 (dest_reg, dest_reg,
					 GEN_INT (move_bytes)));
		}
	    }
	}
    }

  else			/* string instructions not available */
    {
      num_reg = offset = 0;
      for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
	{
	  /* Calculate the correct offset for src/dest */
	  if (offset == 0)
	    {
	      src_addr = src_reg;
	      dest_addr = dest_reg;
	    }
	  else
	    {
	      src_addr = plus_constant (src_reg, offset);
	      dest_addr = plus_constant (dest_reg, offset);
	    }

	  /* Generate the appropriate load and store, saving the stores
	     for later.  Emitting all loads before their stores gives the
	     scheduler independent insns to work with.  */
	  if (bytes >= 8 && TARGET_POWERPC64
	      /* 64-bit loads and stores require word-aligned
		 displacements.  */
	      && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
	    {
	      move_bytes = 8;
	      tmp_reg = gen_reg_rtx (DImode);
	      emit_insn (gen_movdi (tmp_reg,
				    expand_block_move_mem (DImode,
							   src_addr,
							   orig_src)));
	      stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
								    dest_addr,
								    orig_dest),
					     tmp_reg);
	    }
	  else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 4;
	      tmp_reg = gen_reg_rtx (SImode);
	      emit_insn (gen_movsi (tmp_reg,
				    expand_block_move_mem (SImode,
							   src_addr,
							   orig_src)));
	      stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
								    dest_addr,
								    orig_dest),
					     tmp_reg);
	    }
	  else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
	    {
	      move_bytes = 2;
	      tmp_reg = gen_reg_rtx (HImode);
	      emit_insn (gen_movhi (tmp_reg,
				    expand_block_move_mem (HImode,
							   src_addr,
							   orig_src)));
	      stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
								    dest_addr,
								    orig_dest),
					     tmp_reg);
	    }
	  else
	    {
	      move_bytes = 1;
	      tmp_reg = gen_reg_rtx (QImode);
	      emit_insn (gen_movqi (tmp_reg,
				    expand_block_move_mem (QImode,
							   src_addr,
							   orig_src)));
	      stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
								    dest_addr,
								    orig_dest),
					     tmp_reg);
	    }

	  /* Flush the batched stores once MAX_MOVE_REG are pending.  */
	  if (num_reg >= MAX_MOVE_REG)
	    {
	      for (i = 0; i < num_reg; i++)
		emit_insn (stores[i]);
	      num_reg = 0;
	    }
	}

      /* Flush any stores left over from the final partial batch.  */
      for (i = 0; i < num_reg; i++)
	emit_insn (stores[i]);
    }

  return 1;
}
5226
5227 \f
5228 /* Return 1 if OP is a load multiple operation. It is known to be a
5229 PARALLEL and the first section will be tested. */
5230
5231 int
5232 load_multiple_operation (op, mode)
5233 rtx op;
5234 enum machine_mode mode ATTRIBUTE_UNUSED;
5235 {
5236 int count = XVECLEN (op, 0);
5237 unsigned int dest_regno;
5238 rtx src_addr;
5239 int i;
5240
5241 /* Perform a quick check so we don't blow up below. */
5242 if (count <= 1
5243 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5244 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5245 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5246 return 0;
5247
5248 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5249 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5250
5251 for (i = 1; i < count; i++)
5252 {
5253 rtx elt = XVECEXP (op, 0, i);
5254
5255 if (GET_CODE (elt) != SET
5256 || GET_CODE (SET_DEST (elt)) != REG
5257 || GET_MODE (SET_DEST (elt)) != SImode
5258 || REGNO (SET_DEST (elt)) != dest_regno + i
5259 || GET_CODE (SET_SRC (elt)) != MEM
5260 || GET_MODE (SET_SRC (elt)) != SImode
5261 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5262 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
5263 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
5264 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
5265 return 0;
5266 }
5267
5268 return 1;
5269 }
5270
5271 /* Similar, but tests for store multiple. Here, the second vector element
5272 is a CLOBBER. It will be tested later. */
5273
5274 int
5275 store_multiple_operation (op, mode)
5276 rtx op;
5277 enum machine_mode mode ATTRIBUTE_UNUSED;
5278 {
5279 int count = XVECLEN (op, 0) - 1;
5280 unsigned int src_regno;
5281 rtx dest_addr;
5282 int i;
5283
5284 /* Perform a quick check so we don't blow up below. */
5285 if (count <= 1
5286 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5287 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5288 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5289 return 0;
5290
5291 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5292 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5293
5294 for (i = 1; i < count; i++)
5295 {
5296 rtx elt = XVECEXP (op, 0, i + 1);
5297
5298 if (GET_CODE (elt) != SET
5299 || GET_CODE (SET_SRC (elt)) != REG
5300 || GET_MODE (SET_SRC (elt)) != SImode
5301 || REGNO (SET_SRC (elt)) != src_regno + i
5302 || GET_CODE (SET_DEST (elt)) != MEM
5303 || GET_MODE (SET_DEST (elt)) != SImode
5304 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5305 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
5306 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
5307 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
5308 return 0;
5309 }
5310
5311 return 1;
5312 }
5313
5314 /* Return 1 for a parallel vrsave operation. */
5315
5316 int
5317 vrsave_operation (op, mode)
5318 rtx op;
5319 enum machine_mode mode ATTRIBUTE_UNUSED;
5320 {
5321 int count = XVECLEN (op, 0);
5322 unsigned int dest_regno, src_regno;
5323 int i;
5324
5325 if (count <= 1
5326 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5327 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5328 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
5329 return 0;
5330
5331 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5332 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5333
5334 if (dest_regno != VRSAVE_REGNO
5335 && src_regno != VRSAVE_REGNO)
5336 return 0;
5337
5338 for (i = 1; i < count; i++)
5339 {
5340 rtx elt = XVECEXP (op, 0, i);
5341
5342 if (GET_CODE (elt) != CLOBBER
5343 && GET_CODE (elt) != SET)
5344 return 0;
5345 }
5346
5347 return 1;
5348 }
5349
/* Return 1 for a PARALLEL suitable for mtcrf.  Each element must be a
   SET of a CR field from (unspec [SRC MASK] 20) with the same source
   register throughout and the mask bit matching the CR field.  */

int
mtcrf_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int count = XVECLEN (op, 0);
  int i;
  rtx src_reg;

  /* Perform a quick check so we don't blow up below.  */
  if (count < 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
      || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
    return 0;
  src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);

  /* The source of the move must be a SImode general register.  */
  if (GET_CODE (src_reg) != REG
      || GET_MODE (src_reg) != SImode
      || ! INT_REGNO_P (REGNO (src_reg)))
    return 0;

  /* Note the loop starts at 0, so element 0 is re-examined under the
     stricter per-element tests below.  */
  for (i = 0; i < count; i++)
    {
      rtx exp = XVECEXP (op, 0, i);
      rtx unspec;
      int maskval;

      /* Each destination must be a condition register field.  */
      if (GET_CODE (exp) != SET
	  || GET_CODE (SET_DEST (exp)) != REG
	  || GET_MODE (SET_DEST (exp)) != CCmode
	  || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
	return 0;
      unspec = SET_SRC (exp);
      /* The mask bit selecting this CR field, mtcrf FXM style: bit 7
	 selects CR0, bit 0 selects CR7.  */
      maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));

      /* NOTE(review): 20 is the bare unspec number used by the mtcrf
	 patterns -- confirm it matches the UNSPEC in rs6000.md.  */
      if (GET_CODE (unspec) != UNSPEC
	  || XINT (unspec, 1) != 20
	  || XVECLEN (unspec, 0) != 2
	  || XVECEXP (unspec, 0, 0) != src_reg
	  || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
	  || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
	return 0;
    }
  return 1;
}
5398
5399 /* Return 1 for an PARALLEL suitable for lmw. */
5400
5401 int
5402 lmw_operation (op, mode)
5403 rtx op;
5404 enum machine_mode mode ATTRIBUTE_UNUSED;
5405 {
5406 int count = XVECLEN (op, 0);
5407 unsigned int dest_regno;
5408 rtx src_addr;
5409 unsigned int base_regno;
5410 HOST_WIDE_INT offset;
5411 int i;
5412
5413 /* Perform a quick check so we don't blow up below. */
5414 if (count <= 1
5415 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5416 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5417 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5418 return 0;
5419
5420 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5421 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5422
5423 if (dest_regno > 31
5424 || count != 32 - (int) dest_regno)
5425 return 0;
5426
5427 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
5428 {
5429 offset = 0;
5430 base_regno = REGNO (src_addr);
5431 if (base_regno == 0)
5432 return 0;
5433 }
5434 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
5435 {
5436 offset = INTVAL (XEXP (src_addr, 1));
5437 base_regno = REGNO (XEXP (src_addr, 0));
5438 }
5439 else
5440 return 0;
5441
5442 for (i = 0; i < count; i++)
5443 {
5444 rtx elt = XVECEXP (op, 0, i);
5445 rtx newaddr;
5446 rtx addr_reg;
5447 HOST_WIDE_INT newoffset;
5448
5449 if (GET_CODE (elt) != SET
5450 || GET_CODE (SET_DEST (elt)) != REG
5451 || GET_MODE (SET_DEST (elt)) != SImode
5452 || REGNO (SET_DEST (elt)) != dest_regno + i
5453 || GET_CODE (SET_SRC (elt)) != MEM
5454 || GET_MODE (SET_SRC (elt)) != SImode)
5455 return 0;
5456 newaddr = XEXP (SET_SRC (elt), 0);
5457 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5458 {
5459 newoffset = 0;
5460 addr_reg = newaddr;
5461 }
5462 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5463 {
5464 addr_reg = XEXP (newaddr, 0);
5465 newoffset = INTVAL (XEXP (newaddr, 1));
5466 }
5467 else
5468 return 0;
5469 if (REGNO (addr_reg) != base_regno
5470 || newoffset != offset + 4 * i)
5471 return 0;
5472 }
5473
5474 return 1;
5475 }
5476
5477 /* Return 1 for an PARALLEL suitable for stmw. */
5478
5479 int
5480 stmw_operation (op, mode)
5481 rtx op;
5482 enum machine_mode mode ATTRIBUTE_UNUSED;
5483 {
5484 int count = XVECLEN (op, 0);
5485 unsigned int src_regno;
5486 rtx dest_addr;
5487 unsigned int base_regno;
5488 HOST_WIDE_INT offset;
5489 int i;
5490
5491 /* Perform a quick check so we don't blow up below. */
5492 if (count <= 1
5493 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5494 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5495 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5496 return 0;
5497
5498 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5499 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5500
5501 if (src_regno > 31
5502 || count != 32 - (int) src_regno)
5503 return 0;
5504
5505 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
5506 {
5507 offset = 0;
5508 base_regno = REGNO (dest_addr);
5509 if (base_regno == 0)
5510 return 0;
5511 }
5512 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
5513 {
5514 offset = INTVAL (XEXP (dest_addr, 1));
5515 base_regno = REGNO (XEXP (dest_addr, 0));
5516 }
5517 else
5518 return 0;
5519
5520 for (i = 0; i < count; i++)
5521 {
5522 rtx elt = XVECEXP (op, 0, i);
5523 rtx newaddr;
5524 rtx addr_reg;
5525 HOST_WIDE_INT newoffset;
5526
5527 if (GET_CODE (elt) != SET
5528 || GET_CODE (SET_SRC (elt)) != REG
5529 || GET_MODE (SET_SRC (elt)) != SImode
5530 || REGNO (SET_SRC (elt)) != src_regno + i
5531 || GET_CODE (SET_DEST (elt)) != MEM
5532 || GET_MODE (SET_DEST (elt)) != SImode)
5533 return 0;
5534 newaddr = XEXP (SET_DEST (elt), 0);
5535 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5536 {
5537 newoffset = 0;
5538 addr_reg = newaddr;
5539 }
5540 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5541 {
5542 addr_reg = XEXP (newaddr, 0);
5543 newoffset = INTVAL (XEXP (newaddr, 1));
5544 }
5545 else
5546 return 0;
5547 if (REGNO (addr_reg) != base_regno
5548 || newoffset != offset + 4 * i)
5549 return 0;
5550 }
5551
5552 return 1;
5553 }
5554 \f
5555 /* A validation routine: say whether CODE, a condition code, and MODE
5556 match. The other alternatives either don't make sense or should
5557 never be generated. */
5558
5559 static void
5560 validate_condition_mode (code, mode)
5561 enum rtx_code code;
5562 enum machine_mode mode;
5563 {
5564 if (GET_RTX_CLASS (code) != '<'
5565 || GET_MODE_CLASS (mode) != MODE_CC)
5566 abort ();
5567
5568 /* These don't make sense. */
5569 if ((code == GT || code == LT || code == GE || code == LE)
5570 && mode == CCUNSmode)
5571 abort ();
5572
5573 if ((code == GTU || code == LTU || code == GEU || code == LEU)
5574 && mode != CCUNSmode)
5575 abort ();
5576
5577 if (mode != CCFPmode
5578 && (code == ORDERED || code == UNORDERED
5579 || code == UNEQ || code == LTGT
5580 || code == UNGT || code == UNLT
5581 || code == UNGE || code == UNLE))
5582 abort ();
5583
5584 /* These should never be generated except for
5585 flag_unsafe_math_optimizations. */
5586 if (mode == CCFPmode
5587 && ! flag_unsafe_math_optimizations
5588 && (code == LE || code == GE
5589 || code == UNEQ || code == LTGT
5590 || code == UNGT || code == UNLT))
5591 abort ();
5592
5593 /* These are invalid; the information is not there. */
5594 if (mode == CCEQmode
5595 && code != EQ && code != NE)
5596 abort ();
5597 }
5598
5599 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
5600 We only check the opcode against the mode of the CC value here. */
5601
5602 int
5603 branch_comparison_operator (op, mode)
5604 rtx op;
5605 enum machine_mode mode ATTRIBUTE_UNUSED;
5606 {
5607 enum rtx_code code = GET_CODE (op);
5608 enum machine_mode cc_mode;
5609
5610 if (GET_RTX_CLASS (code) != '<')
5611 return 0;
5612
5613 cc_mode = GET_MODE (XEXP (op, 0));
5614 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5615 return 0;
5616
5617 validate_condition_mode (code, cc_mode);
5618
5619 return 1;
5620 }
5621
5622 /* Return 1 if OP is a comparison operation that is valid for a branch
5623 insn and which is true if the corresponding bit in the CC register
5624 is set. */
5625
5626 int
5627 branch_positive_comparison_operator (op, mode)
5628 rtx op;
5629 enum machine_mode mode;
5630 {
5631 enum rtx_code code;
5632
5633 if (! branch_comparison_operator (op, mode))
5634 return 0;
5635
5636 code = GET_CODE (op);
5637 return (code == EQ || code == LT || code == GT
5638 || code == LTU || code == GTU
5639 || code == UNORDERED);
5640 }
5641
5642 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
5643 We check the opcode against the mode of the CC value and disallow EQ or
5644 NE comparisons for integers. */
5645
5646 int
5647 scc_comparison_operator (op, mode)
5648 rtx op;
5649 enum machine_mode mode;
5650 {
5651 enum rtx_code code = GET_CODE (op);
5652 enum machine_mode cc_mode;
5653
5654 if (GET_MODE (op) != mode && mode != VOIDmode)
5655 return 0;
5656
5657 if (GET_RTX_CLASS (code) != '<')
5658 return 0;
5659
5660 cc_mode = GET_MODE (XEXP (op, 0));
5661 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5662 return 0;
5663
5664 validate_condition_mode (code, cc_mode);
5665
5666 if (code == NE && cc_mode != CCFPmode)
5667 return 0;
5668
5669 return 1;
5670 }
5671
5672 int
5673 trap_comparison_operator (op, mode)
5674 rtx op;
5675 enum machine_mode mode;
5676 {
5677 if (mode != VOIDmode && mode != GET_MODE (op))
5678 return 0;
5679 return GET_RTX_CLASS (GET_CODE (op)) == '<';
5680 }
5681
5682 int
5683 boolean_operator (op, mode)
5684 rtx op;
5685 enum machine_mode mode ATTRIBUTE_UNUSED;
5686 {
5687 enum rtx_code code = GET_CODE (op);
5688 return (code == AND || code == IOR || code == XOR);
5689 }
5690
5691 int
5692 boolean_or_operator (op, mode)
5693 rtx op;
5694 enum machine_mode mode ATTRIBUTE_UNUSED;
5695 {
5696 enum rtx_code code = GET_CODE (op);
5697 return (code == IOR || code == XOR);
5698 }
5699
5700 int
5701 min_max_operator (op, mode)
5702 rtx op;
5703 enum machine_mode mode ATTRIBUTE_UNUSED;
5704 {
5705 enum rtx_code code = GET_CODE (op);
5706 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
5707 }
5708 \f
5709 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
5710 mask required to convert the result of a rotate insn into a shift
5711 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
5712
5713 int
5714 includes_lshift_p (shiftop, andop)
5715 rtx shiftop;
5716 rtx andop;
5717 {
5718 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5719
5720 shift_mask <<= INTVAL (shiftop);
5721
5722 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5723 }
5724
5725 /* Similar, but for right shift. */
5726
5727 int
5728 includes_rshift_p (shiftop, andop)
5729 rtx shiftop;
5730 rtx andop;
5731 {
5732 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5733
5734 shift_mask >>= INTVAL (shiftop);
5735
5736 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5737 }
5738
/* Return 1 if ANDOP is a mask suitable for use with an rldic insn
   to perform a left shift.  It must have exactly SHIFTOP least
   significant 0's, then one or more 1's, then zero or more 0's.  */

int
includes_rldic_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      c = INTVAL (andop);
      /* All-zeros and all-ones masks are never valid.  */
      if (c == 0 || c == ~0)
	return 0;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must coincide with the LSB of the shift mask, i.e. the mask
	 must have exactly SHIFTOP trailing zeros.  */
      if (-lsb != shift_mask)
	return 0;

      /* Invert to look for the next transition (if any).  */
      c = ~c;

      /* Remove the low group of ones (originally low group of zeros).  */
      c &= -lsb;

      /* Again find the lsb, and check we have all 1's above, i.e. the
	 original mask had a single contiguous run of ones.  */
      lsb = c & -c;
      return c == -lsb;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* The mask does not fit in one HOST_WIDE_INT: handle the two-word
	 (low/high) form used when the host word is only 32 bits.  */
      HOST_WIDE_INT low, high, lsb;
      HOST_WIDE_INT shift_mask_low, shift_mask_high;

      low = CONST_DOUBLE_LOW (andop);
      /* HIGH is only assigned -- and only read -- when the host word is
	 narrower than 64 bits, so it is initialized on every path that
	 uses it.  */
      if (HOST_BITS_PER_WIDE_INT < 64)
	high = CONST_DOUBLE_HIGH (andop);

      if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
	  || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The run of ones lies entirely in the high word; the shift
	     count must be at least 32 for the trailing zeros to reach it.  */
	  shift_mask_high = ~0;
	  if (INTVAL (shiftop) > 32)
	    shift_mask_high <<= INTVAL (shiftop) - 32;

	  lsb = high & -high;

	  if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
	    return 0;

	  /* Same single-run check as the CONST_INT case, on HIGH alone.  */
	  high = ~high;
	  high &= -lsb;

	  lsb = high & -high;
	  return high == -lsb;
	}

      /* Otherwise the run starts in the low word.  */
      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if (-lsb != shift_mask_low)
	return 0;

      if (HOST_BITS_PER_WIDE_INT < 64)
	high = ~high;
      low = ~low;
      low &= -lsb;

      if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
	{
	  /* The run of ones fills the rest of the low word and may
	     continue into the high word; check the remainder there.  */
	  lsb = high & -high;
	  return high == -lsb;
	}

      /* The run ends within the low word; the high word (if distinct)
	 must then be all ones of the inverted value, i.e. all zeros
	 originally... here we require ~high == -0, meaning HIGH == ~0
	 after inversion -- the single-run condition across both words.  */
      lsb = low & -low;
      return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
    }
  else
    return 0;
}
5833
/* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
   to perform a left shift.  It must have SHIFTOP or more least
   significant 0's, with the remainder of the word 1's.  */

int
includes_rldicr_lshift_p (shiftop, andop)
     rtx shiftop;
     rtx andop;
{
  if (GET_CODE (andop) == CONST_INT)
    {
      HOST_WIDE_INT c, lsb, shift_mask;

      shift_mask = ~0;
      shift_mask <<= INTVAL (shiftop);
      c = INTVAL (andop);

      /* Find the least significant one bit.  */
      lsb = c & -c;

      /* It must be covered by the shift mask.
	 This test also rejects c == 0.  */
      if ((lsb & shift_mask) == 0)
	return 0;

      /* Check we have all 1's above the transition, and reject all 1's.  */
      return c == -lsb && lsb != 1;
    }
  else if (GET_CODE (andop) == CONST_DOUBLE
	   && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
    {
      /* 64-bit mask expressed as a CONST_DOUBLE on a 32-bit host:
	 check the high word first, then the low word.  */
      HOST_WIDE_INT low, lsb, shift_mask_low;

      low = CONST_DOUBLE_LOW (andop);

      if (HOST_BITS_PER_WIDE_INT < 64)
	{
	  HOST_WIDE_INT high, shift_mask_high;

	  high = CONST_DOUBLE_HIGH (andop);

	  if (low == 0)
	    {
	      /* Mask is entirely in the high word.  */
	      shift_mask_high = ~0;
	      if (INTVAL (shiftop) > 32)
		shift_mask_high <<= INTVAL (shiftop) - 32;

	      lsb = high & -high;

	      if ((lsb & shift_mask_high) == 0)
		return 0;

	      return high == -lsb;
	    }
	  /* Low word is nonzero: the high word must then be all 1's
	     for the overall mask to be 0...01...1.  */
	  if (high != ~0)
	    return 0;
	}

      shift_mask_low = ~0;
      shift_mask_low <<= INTVAL (shiftop);

      lsb = low & -low;

      if ((lsb & shift_mask_low) == 0)
	return 0;

      return low == -lsb && lsb != 1;
    }
  else
    return 0;
}
5905
5906 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
5907 for lfq and stfq insns.
5908
5909 Note reg1 and reg2 *must* be hard registers. To be sure we will
5910 abort if we are passed pseudo registers. */
5911
5912 int
5913 registers_ok_for_quad_peep (reg1, reg2)
5914 rtx reg1, reg2;
5915 {
5916 /* We might have been passed a SUBREG. */
5917 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
5918 return 0;
5919
5920 return (REGNO (reg1) == REGNO (reg2) - 1);
5921 }
5922
5923 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
5924 addr1 and addr2 must be in consecutive memory locations
5925 (addr2 == addr1 + 8). */
5926
5927 int
5928 addrs_ok_for_quad_peep (addr1, addr2)
5929 rtx addr1;
5930 rtx addr2;
5931 {
5932 unsigned int reg1;
5933 int offset1;
5934
5935 /* Extract an offset (if used) from the first addr. */
5936 if (GET_CODE (addr1) == PLUS)
5937 {
5938 /* If not a REG, return zero. */
5939 if (GET_CODE (XEXP (addr1, 0)) != REG)
5940 return 0;
5941 else
5942 {
5943 reg1 = REGNO (XEXP (addr1, 0));
5944 /* The offset must be constant! */
5945 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
5946 return 0;
5947 offset1 = INTVAL (XEXP (addr1, 1));
5948 }
5949 }
5950 else if (GET_CODE (addr1) != REG)
5951 return 0;
5952 else
5953 {
5954 reg1 = REGNO (addr1);
5955 /* This was a simple (mem (reg)) expression. Offset is 0. */
5956 offset1 = 0;
5957 }
5958
5959 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
5960 if (GET_CODE (addr2) != PLUS)
5961 return 0;
5962
5963 if (GET_CODE (XEXP (addr2, 0)) != REG
5964 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
5965 return 0;
5966
5967 if (reg1 != REGNO (XEXP (addr2, 0)))
5968 return 0;
5969
5970 /* The offset for the second addr must be 8 more than the first addr. */
5971 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
5972 return 0;
5973
5974 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
5975 instructions. */
5976 return 1;
5977 }
5978 \f
/* Return the register class of a scratch register needed to copy IN into
   or out of a register in CLASS in MODE.  If it can be done directly,
   NO_REGS is returned.  */

enum reg_class
secondary_reload_class (class, mode, in)
     enum reg_class class;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx in;
{
  /* Hard register number of IN, or -1 when IN is not (or does not
     resolve to) a hard register.  */
  int regno;

  if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
    {
      /* We cannot copy a symbolic operand directly into anything
	 other than BASE_REGS for TARGET_ELF.  So indicate that a
	 register from BASE_REGS is needed as an intermediate
	 register.

	 On Darwin, pic addresses require a load from memory, which
	 needs a base register.  */
      if (class != BASE_REGS
	  && (GET_CODE (in) == SYMBOL_REF
	      || GET_CODE (in) == HIGH
	      || GET_CODE (in) == LABEL_REF
	      || GET_CODE (in) == CONST))
	return BASE_REGS;
    }

  if (GET_CODE (in) == REG)
    {
      regno = REGNO (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	{
	  /* Pseudo: see whether reload has already assigned it a
	     hard register.  */
	  regno = true_regnum (in);
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = -1;
	}
    }
  else if (GET_CODE (in) == SUBREG)
    {
      regno = true_regnum (in);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = -1;
    }
  else
    /* IN is a constant or memory; no hard register involved.  */
    regno = -1;

  /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
     into anything.  */
  if (class == GENERAL_REGS || class == BASE_REGS
      || (regno >= 0 && INT_REGNO_P (regno)))
    return NO_REGS;

  /* Constants, memory, and FP registers can go into FP registers.  */
  if ((regno == -1 || FP_REGNO_P (regno))
      && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
    return NO_REGS;

  /* Memory, and AltiVec registers can go into AltiVec registers.  */
  if ((regno == -1 || ALTIVEC_REGNO_P (regno))
      && class == ALTIVEC_REGS)
    return NO_REGS;

  /* We can copy among the CR registers.  */
  if ((class == CR_REGS || class == CR0_REGS)
      && regno >= 0 && CR_REGNO_P (regno))
    return NO_REGS;

  /* Otherwise, we need GENERAL_REGS.  */
  return GENERAL_REGS;
}
6051 \f
/* Given a comparison operation, return the bit number in CCR to test.  We
   know this is a valid comparison.

   SCC_P is 1 if this is for an scc.  That means that %D will have been
   used instead of %C, so the bits will be in different places.

   Return -1 if OP isn't a valid comparison for some reason.  */

int
ccr_bit (op, scc_p)
     rtx op;
     int scc_p;
{
  enum rtx_code code = GET_CODE (op);
  enum machine_mode cc_mode;
  int cc_regnum;
  int base_bit;
  rtx reg;

  /* Only comparison codes are meaningful here.  */
  if (GET_RTX_CLASS (code) != '<')
    return -1;

  reg = XEXP (op, 0);

  /* The first operand must be a condition-code hard register.  */
  if (GET_CODE (reg) != REG
      || ! CR_REGNO_P (REGNO (reg)))
    abort ();

  cc_mode = GET_MODE (reg);
  cc_regnum = REGNO (reg);
  /* Each CR field occupies four consecutive CCR bits: LT, GT, EQ, SO.  */
  base_bit = 4 * (cc_regnum - CR0_REGNO);

  validate_condition_mode (code, cc_mode);

  switch (code)
    {
    case NE:
      return scc_p ? base_bit + 3 : base_bit + 2;
    case EQ:
      return base_bit + 2;
    case GT:  case GTU:  case UNLE:
      return base_bit + 1;
    case LT:  case LTU:  case UNGE:
      return base_bit;
    case ORDERED:  case UNORDERED:
      return base_bit + 3;

    case GE:  case GEU:
      /* If scc, we will have done a cror to put the bit in the
	 unordered position.  So test that bit.  For integer, this is ! LT
	 unless this is an scc insn.  */
      return scc_p ? base_bit + 3 : base_bit;

    case LE:  case LEU:
      return scc_p ? base_bit + 3 : base_bit + 1;

    default:
      abort ();
    }
}
6112 \f
/* Return the GOT register.  Also marks the PIC offset table register as
   live/used so later prologue/epilogue generation saves it.  */

struct rtx_def *
rs6000_got_register (value)
     rtx value ATTRIBUTE_UNUSED;
{
  /* The second flow pass currently (June 1999) can't update
     regs_ever_live without disturbing other parts of the compiler, so
     update it here to make the prolog/epilogue code happy.  */
  if (no_new_pseudos && ! regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
    regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

  current_function_uses_pic_offset_table = 1;

  return pic_offset_table_rtx;
}
6129 \f
6130 /* Functions to init, mark and free struct machine_function.
6131 These will be called, via pointer variables,
6132 from push_function_context and pop_function_context. */
6133
/* Allocate and zero-initialize the per-function machine-specific state
   for P.  Released by rs6000_free_machine_status.  */

static void
rs6000_init_machine_status (p)
     struct function *p;
{
  p->machine = (machine_function *) xcalloc (1, sizeof (machine_function));
}
6140
6141 static void
6142 rs6000_free_machine_status (p)
6143 struct function *p;
6144 {
6145 if (p->machine == NULL)
6146 return;
6147
6148 free (p->machine);
6149 p->machine = NULL;
6150 }
6151
6152 \f
6153 /* Print an operand. Recognize special options, documented below. */
6154
6155 #if TARGET_ELF
6156 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
6157 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
6158 #else
6159 #define SMALL_DATA_RELOC "sda21"
6160 #define SMALL_DATA_REG 0
6161 #endif
6162
/* Print operand X to FILE, applying the single-character operand
   modifier CODE.  CODE == 0 prints the operand with no modification;
   the other codes select the specialized formats documented
   case-by-case below.  Invalid operands are reported through
   output_operand_lossage.  */

void
print_operand (file, x, code)
     FILE *file;
     rtx x;
     int code;
{
  int i;
  HOST_WIDE_INT val;

  /* These macros test for integers and extract the low-order bits.  */
#define INT_P(X)  \
((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE)	\
&& GET_MODE (X) == VOIDmode)

#define INT_LOWPART(X) \
  (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))

  switch (code)
    {
    case '.':
      /* Write out an instruction after the call which may be replaced
	 with glue code by the loader.  This depends on the AIX version.  */
      asm_fprintf (file, RS6000_CALL_GLUE);
      return;

      /* %a is output_address.  */

    case 'A':
      /* If X is a constant integer whose low-order 5 bits are zero,
	 write 'l'.  Otherwise, write 'r'.  This is a kludge to fix a bug
	 in the AIX assembler where "sri" with a zero shift count
	 writes a trash instruction.  */
      if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
	putc ('l', file);
      else
	putc ('r', file);
      return;

    case 'b':
      /* If constant, low-order 16 bits of constant, unsigned.
	 Otherwise, write normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
      else
	print_operand (file, x, 0);
      return;

    case 'B':
      /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
	 for 64-bit mask direction.  */
      putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
      return;

      /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
	 output_operand.  */

    case 'D':
      /* There used to be a comment for 'C' reading "This is an
	 optional cror needed for certain floating-point
	 comparisons.  Otherwise write nothing."  */

      /* Similar, except that this is for an scc, so we must be able to
	 encode the test in a single bit that is one.  We do the above
	 for any LE, GE, GEU, or LEU and invert the bit for NE.  */
      if (GET_CODE (x) == LE || GET_CODE (x) == GE
	  || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
	{
	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);

	  fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
		   base_bit + 2,
		   base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
	}

      else if (GET_CODE (x) == NE)
	{
	  int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);

	  fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
		   base_bit + 2, base_bit + 2);
	}
      return;

    case 'E':
      /* X is a CR register.  Print the number of the EQ bit of the CR */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%E value");
      else
	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
      return;

    case 'f':
      /* X is a CR register.  Print the shift count needed to move it
	 to the high-order four bits.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%f value");
      else
	fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
      return;

    case 'F':
      /* Similar, but print the count for the rotate in the opposite
	 direction.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%F value");
      else
	fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
      return;

    case 'G':
      /* X is a constant integer.  If it is negative, print "m",
	 otherwise print "z".  This is to make a aze or ame insn.  */
      if (GET_CODE (x) != CONST_INT)
	output_operand_lossage ("invalid %%G value");
      else if (INTVAL (x) >= 0)
	putc ('z', file);
      else
	putc ('m', file);
      return;

    case 'h':
      /* If constant, output low-order five bits.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
      else
	print_operand (file, x, 0);
      return;

    case 'H':
      /* If constant, output low-order six bits.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
      else
	print_operand (file, x, 0);
      return;

    case 'I':
      /* Print `i' if this is a constant, else nothing.  */
      if (INT_P (x))
	putc ('i', file);
      return;

    case 'j':
      /* Write the bit number in CCR for jump.  */
      i = ccr_bit (x, 0);
      if (i == -1)
	output_operand_lossage ("invalid %%j code");
      else
	fprintf (file, "%d", i);
      return;

    case 'J':
      /* Similar, but add one for shift count in rlinm for scc and pass
	 scc flag to `ccr_bit'.  */
      i = ccr_bit (x, 1);
      if (i == -1)
	output_operand_lossage ("invalid %%J code");
      else
	/* If we want bit 31, write a shift count of zero, not 32.  */
	fprintf (file, "%d", i == 31 ? 0 : i + 1);
      return;

    case 'k':
      /* X must be a constant.  Write the 1's complement of the
	 constant.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%k value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
      return;

    case 'K':
      /* X must be a symbolic constant on ELF.  Write an
	 expression suitable for an 'addi' that adds in the low 16
	 bits of the MEM.  */
      if (GET_CODE (x) != CONST)
	{
	  print_operand_address (file, x);
	  fputs ("@l", file);
	}
      else
	{
	  /* NOTE(review): on lossage we still fall through and emit the
	     pieces below — presumably harmless once the error has been
	     recorded for this insn; confirm.  */
	  if (GET_CODE (XEXP (x, 0)) != PLUS
	      || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
		  && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
	      || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
	    output_operand_lossage ("invalid %%K value");
	  print_operand_address (file, XEXP (XEXP (x, 0), 0));
	  fputs ("@l", file);
	  print_operand (file, XEXP (XEXP (x, 0), 1), 0);
	}
      return;

      /* %l is output_asm_label.  */

    case 'L':
      /* Write second word of DImode or DFmode reference.  Works on register
	 or non-indexed memory only.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 1]);
      else if (GET_CODE (x) == MEM)
	{
	  /* Handle possible auto-increment.  Since it is pre-increment and
	     we have already done it, we can just use an offset of word.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0),
					   UNITS_PER_WORD));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode,
						     UNITS_PER_WORD),
				  0));

	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

    case 'm':
      /* MB value for a mask operand.  */
      if (! mask_operand (x, SImode))
	output_operand_lossage ("invalid %%m value");

      val = INT_LOWPART (x);

      /* If the high bit is set and the low bit is not, the value is zero.
	 If the high bit is zero, the value is the first 1 bit we find from
	 the left.  */
      if ((val & 0x80000000) && ((val & 1) == 0))
	{
	  putc ('0', file);
	  return;
	}
      else if ((val & 0x80000000) == 0)
	{
	  for (i = 1; i < 32; i++)
	    if ((val <<= 1) & 0x80000000)
	      break;
	  fprintf (file, "%d", i);
	  return;
	}
	  
      /* Otherwise, look for the first 0 bit from the right.  The result is its
	 number plus 1.  We know the low-order bit is one.  */
      for (i = 0; i < 32; i++)
	if (((val >>= 1) & 1) == 0)
	  break;

      /* If we ended in ...01, i would be 0.  The correct value is 31, so
	 we want 31 - i.  */
      fprintf (file, "%d", 31 - i);
      return;

    case 'M':
      /* ME value for a mask operand.  */
      if (! mask_operand (x, SImode))
	output_operand_lossage ("invalid %%M value");

      val = INT_LOWPART (x);

      /* If the low bit is set and the high bit is not, the value is 31.
	 If the low bit is zero, the value is the first 1 bit we find from
	 the right.  */
      if ((val & 1) && ((val & 0x80000000) == 0))
	{
	  fputs ("31", file);
	  return;
	}
      else if ((val & 1) == 0)
	{
	  for (i = 0; i < 32; i++)
	    if ((val >>= 1) & 1)
	      break;

	  /* If we had ....10, i would be 0.  The result should be
	     30, so we need 30 - i.  */
	  fprintf (file, "%d", 30 - i);
	  return;
	}
	  
      /* Otherwise, look for the first 0 bit from the left.  The result is its
	 number minus 1.  We know the high-order bit is one.  */
      for (i = 0; i < 32; i++)
	if (((val <<= 1) & 0x80000000) == 0)
	  break;

      fprintf (file, "%d", i);
      return;

      /* %n outputs the negative of its operand.  */

    case 'N':
      /* Write the number of elements in the vector times 4.  */
      if (GET_CODE (x) != PARALLEL)
	output_operand_lossage ("invalid %%N value");
      else
	fprintf (file, "%d", XVECLEN (x, 0) * 4);
      return;

    case 'O':
      /* Similar, but subtract 1 first.  */
      if (GET_CODE (x) != PARALLEL)
	output_operand_lossage ("invalid %%O value");
      else
	fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
      return;

    case 'p':
      /* X is a CONST_INT that is a power of two.  Output the logarithm.  */
      if (! INT_P (x)
	  || INT_LOWPART (x) < 0
	  || (i = exact_log2 (INT_LOWPART (x))) < 0)
	output_operand_lossage ("invalid %%p value");
      else
	fprintf (file, "%d", i);
      return;

    case 'P':
      /* The operand must be an indirect memory reference.  The result
	 is the register number.  */
      if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
	  || REGNO (XEXP (x, 0)) >= 32)
	output_operand_lossage ("invalid %%P value");
      else
	fprintf (file, "%d", REGNO (XEXP (x, 0)));
      return;

    case 'q':
      /* This outputs the logical code corresponding to a boolean
	 expression.  The expression may have one or both operands
	 negated (if one, only the first one).  For condition register
	 logical operations, it will also treat the negated
	 CR codes as NOTs, but not handle NOTs of them.  */
      {
	const char *const *t = 0;
	const char *s;
	/* Note: this local `code' deliberately shadows the CODE
	   parameter; it is the rtx code of the expression.  */
	enum rtx_code code = GET_CODE (x);
	static const char * const tbl[3][3] = {
	  { "and", "andc", "nor" },
	  { "or", "orc", "nand" },
	  { "xor", "eqv", "xor" } };

	if (code == AND)
	  t = tbl[0];
	else if (code == IOR)
	  t = tbl[1];
	else if (code == XOR)
	  t = tbl[2];
	else
	  output_operand_lossage ("invalid %%q value");

	if (GET_CODE (XEXP (x, 0)) != NOT)
	  s = t[0];
	else
	  {
	    if (GET_CODE (XEXP (x, 1)) == NOT)
	      s = t[2];
	    else
	      s = t[1];
	  }
	
	fputs (s, file);
      }
      return;

    case 'R':
      /* X is a CR register.  Print the mask for `mtcrf'.  */
      if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
	output_operand_lossage ("invalid %%R value");
      else
	fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
      return;

    case 's':
      /* Low 5 bits of 32 - value */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%s value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
      return;

    case 'S':
      /* PowerPC64 mask position.  All 0's and all 1's are excluded.
	 CONST_INT 32-bit mask is considered sign-extended so any
	 transition must occur within the CONST_INT, not on the boundary.  */
      if (! mask64_operand (x, DImode))
	output_operand_lossage ("invalid %%S value");

      val = INT_LOWPART (x);

      if (val & 1)      /* Clear Left */
	{
	  for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
	    if (!((val >>= 1) & 1))
	      break;

#if HOST_BITS_PER_WIDE_INT == 32
	  /* On a 32-bit host, continue the scan into the high word of
	     a CONST_DOUBLE.  */
	  if (GET_CODE (x) == CONST_DOUBLE && i == 32)
	    {
	      val = CONST_DOUBLE_HIGH (x);

	      if (val == 0)
		--i;
	      else
		for (i = 32; i < 64; i++)
		  if (!((val >>= 1) & 1))
		    break;
	    }
#endif
	/* i = index of last set bit from right
	   mask begins at 63 - i from left */
	  if (i > 63)
	    output_operand_lossage ("%%S computed all 1's mask");

	  fprintf (file, "%d", 63 - i);
	  return;
	}
      else	/* Clear Right */
	{
	  for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
	    if ((val >>= 1) & 1)
	      break;

#if HOST_BITS_PER_WIDE_INT == 32
	  if (GET_CODE (x) == CONST_DOUBLE && i == 32)
	    {
	      val = CONST_DOUBLE_HIGH (x);

	      if (val == (HOST_WIDE_INT) -1)
		--i;
	      else
		for (i = 32; i < 64; i++)
		  if ((val >>= 1) & 1)
		    break;
	    }
#endif
	/* i = index of last clear bit from right
	   mask ends at 62 - i from left */
	  if (i > 62)
	    output_operand_lossage ("%%S computed all 0's mask");

	  fprintf (file, "%d", 62 - i);
	  return;
	}

    case 'T':
      /* Print the symbolic name of a branch target register.  */
      if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
				  && REGNO (x) != COUNT_REGISTER_REGNUM))
	output_operand_lossage ("invalid %%T value");
      else if (REGNO (x) == LINK_REGISTER_REGNUM)
	fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
      else
	fputs ("ctr", file);
      return;

    case 'u':
      /* High-order 16 bits of constant for use in unsigned operand.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%u value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_HEX, 
		 (INT_LOWPART (x) >> 16) & 0xffff);
      return;

    case 'v':
      /* High-order 16 bits of constant for use in signed operand.  */
      if (! INT_P (x))
	output_operand_lossage ("invalid %%v value");
      else
	fprintf (file, HOST_WIDE_INT_PRINT_HEX,
		 (INT_LOWPART (x) >> 16) & 0xffff);
      return;

    case 'U':
      /* Print `u' if this has an auto-increment or auto-decrement.  */
      if (GET_CODE (x) == MEM
	  && (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC))
	putc ('u', file);
      return;

    case 'V':
      /* Print the trap code for this operand.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fputs ("eq", file);   /* 4 */
	  break;
	case NE:
	  fputs ("ne", file);   /* 24 */
	  break;
	case LT:
	  fputs ("lt", file);   /* 16 */
	  break;
	case LE:
	  fputs ("le", file);   /* 20 */
	  break;
	case GT:
	  fputs ("gt", file);   /* 8 */
	  break;
	case GE:
	  fputs ("ge", file);   /* 12 */
	  break;
	case LTU:
	  fputs ("llt", file);  /* 2 */
	  break;
	case LEU:
	  fputs ("lle", file);  /* 6 */
	  break;
	case GTU:
	  fputs ("lgt", file);  /* 1 */
	  break;
	case GEU:
	  fputs ("lge", file);  /* 5 */
	  break;
	default:
	  abort ();
	}
      break;

    case 'w':
      /* If constant, low-order 16 bits of constant, signed.  Otherwise, write
	 normally.  */
      if (INT_P (x))
	fprintf (file, HOST_WIDE_INT_PRINT_DEC,
		 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
      else
	print_operand (file, x, 0);
      return;

    case 'W':
      /* MB value for a PowerPC64 rldic operand.  */
      val = (GET_CODE (x) == CONST_INT
	     ? INTVAL (x) : CONST_DOUBLE_HIGH (x));

      if (val < 0)
	i = -1;
      else
	for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
	  if ((val <<= 1) < 0)
	    break;

#if HOST_BITS_PER_WIDE_INT == 32
      if (GET_CODE (x) == CONST_INT && i >= 0)
	i += 32;  /* zero-extend high-part was all 0's */
      else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
	{
	  val = CONST_DOUBLE_LOW (x);

	  if (val == 0)
	    abort ();
	  else if (val < 0)
	    --i;
	  else
	    for ( ; i < 64; i++)
	      if ((val <<= 1) < 0)
		break;
	}
#endif

      fprintf (file, "%d", i + 1);
      return;

    case 'X':
      if (GET_CODE (x) == MEM
	  && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
	putc ('x', file);
      return;

    case 'Y':
      /* Like 'L', for third word of TImode  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 2]);
      else if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;
			    
    case 'z':
      /* X is a SYMBOL_REF.  Write out the name preceded by a
	 period and without any trailing data in brackets.  Used for function
	 names.  If we are configured for System V (or the embedded ABI) on
	 the PowerPC, do not emit the period, since those systems do not use
	 TOCs and the like.  */
      if (GET_CODE (x) != SYMBOL_REF)
	abort ();

      if (XSTR (x, 0)[0] != '.')
	{
	  switch (DEFAULT_ABI)
	    {
	    default:
	      abort ();

	    case ABI_AIX:
	      putc ('.', file);
	      break;

	    case ABI_V4:
	    case ABI_AIX_NODESC:
	    case ABI_DARWIN:
	      break;
	    }
	}
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
#else
      assemble_name (file, XSTR (x, 0));
#endif
      return;

    case 'Z':
      /* Like 'L', for last word of TImode.  */
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x) + 3]);
      else if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
	  else
	    output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
	  if (small_data_operand (x, GET_MODE (x)))
	    fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		     reg_names[SMALL_DATA_REG]);
	}
      return;

      /* Print AltiVec memory operand.  */
    case 'y':
      {
	rtx tmp;

	if (GET_CODE (x) != MEM)
	  abort ();

	tmp = XEXP (x, 0);

	if (GET_CODE (tmp) == REG)
	  fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
	else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
	  {
	    /* r0 in the base slot means literal zero, so swap the
	       operands when the first register is r0.  */
	    if (REGNO (XEXP (tmp, 0)) == 0)
	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
		       reg_names[ REGNO (XEXP (tmp, 0)) ]);
	    else
	      fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
		       reg_names[ REGNO (XEXP (tmp, 1)) ]);
	  }
	else
	  abort ();
	break;
      }
			    
    case 0:
      if (GET_CODE (x) == REG)
	fprintf (file, "%s", reg_names[REGNO (x)]);
      else if (GET_CODE (x) == MEM)
	{
	  /* We need to handle PRE_INC and PRE_DEC here, since we need to
	     know the width from the mode.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC)
	    fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
	  else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
		     reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
	  else
	    output_address (XEXP (x, 0));
	}
      else
	output_addr_const (file, x);
      return;

    default:
      output_operand_lossage ("invalid %%xn code");
    }
}
6853 \f
6854 /* Print the address of an operand. */
6855
/* Print the address expression X to FILE in assembler syntax,
   dispatching on the rtl form of the address.  Unsupported forms
   abort.  */

void
print_operand_address (file, x)
     FILE *file;
     rtx x;
{
  if (GET_CODE (x) == REG)
    /* Plain register indirect: "0(rN)".  */
    fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
  else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
	   || GET_CODE (x) == LABEL_REF)
    {
      output_addr_const (file, x);
      if (small_data_operand (x, GET_MODE (x)))
	fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
		 reg_names[SMALL_DATA_REG]);
      else if (TARGET_TOC)
	/* With a TOC, bare symbolic addresses should never reach here.  */
	abort ();
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
    {
      /* Indexed form.  r0 in the base slot reads as literal zero, so
	 swap the operands when the first register is r0.  */
      if (REGNO (XEXP (x, 0)) == 0)
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
		 reg_names[ REGNO (XEXP (x, 0)) ]);
      else
	fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
		 reg_names[ REGNO (XEXP (x, 1)) ]);
    }
  else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      /* Base + displacement: "disp(rN)".  */
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
      fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#if TARGET_ELF
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
#if TARGET_MACHO
  else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
	   && CONSTANT_P (XEXP (x, 1)))
    {
      fprintf (file, "lo16(");
      output_addr_const (file, XEXP (x, 1));
      fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
    }
#endif
  else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
    {
      if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
	{
	  rtx contains_minus = XEXP (x, 1);
	  rtx minus, symref;
	  const char *name;
	  
	  /* Find the (minus (sym) (toc)) buried in X, and temporarily
	     turn it into (sym) for output_addr_const.  */
	  while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
	    contains_minus = XEXP (contains_minus, 0);

	  minus = XEXP (contains_minus, 0);
	  symref = XEXP (minus, 0);
	  XEXP (contains_minus, 0) = symref;
	  if (TARGET_ELF)
	    {
	      /* On ELF, print the symbol with an "@toc" suffix by
		 temporarily swapping in an alloca'd copy of the name;
		 the original name is restored below.  */
	      char *newname;

	      name = XSTR (symref, 0);
	      newname = alloca (strlen (name) + sizeof ("@toc"));
	      strcpy (newname, name);
	      strcat (newname, "@toc");
	      XSTR (symref, 0) = newname;
	    }
	  output_addr_const (file, XEXP (x, 1));
	  if (TARGET_ELF)
	    XSTR (symref, 0) = name;
	  XEXP (contains_minus, 0) = minus;
	}
      else
	output_addr_const (file, XEXP (x, 1));

      fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
    }
  else
    abort ();
}
6943 \f
6944 /* Target hook for assembling integer objects. The powerpc version has
6945 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
6946 is defined. It also needs to handle DI-mode objects on 64-bit
6947 targets. */
6948
static bool
rs6000_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
#ifdef RELOCATABLE_NEEDS_FIXUP
  /* Special handling for SI values.  */
  if (size == 4 && aligned_p)
    {
      extern int in_toc_section PARAMS ((void));
      /* Guards against re-entry: output_addr_const below can call back
	 into the integer-assembly path, and the fixup record must only
	 be emitted for the outermost value.  */
      static int recurse = 0;

      /* For -mrelocatable, we mark all addresses that need to be fixed up
	 in the .fixup section.  Each record is an internal label placed
	 before the relocated word, followed by a .fixup entry pointing
	 back at that label.  */
      if (TARGET_RELOCATABLE
	  && !in_toc_section ()
	  && !in_text_section ()
	  && !recurse
	  && GET_CODE (x) != CONST_INT
	  && GET_CODE (x) != CONST_DOUBLE
	  && CONSTANT_P (x))
	{
	  char buf[256];

	  recurse = 1;
	  /* Label the word being fixed up ("LCPn").  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
	  fixuplabelno++;
	  ASM_OUTPUT_LABEL (asm_out_file, buf);
	  fprintf (asm_out_file, "\t.long\t(");
	  output_addr_const (asm_out_file, x);
	  fprintf (asm_out_file, ")@fixup\n");
	  /* Record the label's address in the .fixup section, then
	     return to the section we came from.  */
	  fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
	  ASM_OUTPUT_ALIGN (asm_out_file, 2);
	  fprintf (asm_out_file, "\t.long\t");
	  assemble_name (asm_out_file, buf);
	  fprintf (asm_out_file, "\n\t.previous\n");
	  recurse = 0;
	  return true;
	}
      /* Remove initial .'s to turn a -mcall-aixdesc function
	 address into the address of the descriptor, not the function
	 itself.  */
      else if (GET_CODE (x) == SYMBOL_REF
	       && XSTR (x, 0)[0] == '.'
	       && DEFAULT_ABI == ABI_AIX)
	{
	  const char *name = XSTR (x, 0);
	  while (*name == '.')
	    name++;

	  fprintf (asm_out_file, "\t.long\t%s\n", name);
	  return true;
	}
    }
#endif /* RELOCATABLE_NEEDS_FIXUP */
  /* Anything not handled above gets the generic treatment.  */
  return default_assemble_integer (x, size, aligned_p);
}
7007 \f
7008 enum rtx_code
7009 rs6000_reverse_condition (mode, code)
7010 enum machine_mode mode;
7011 enum rtx_code code;
7012 {
7013 /* Reversal of FP compares takes care -- an ordered compare
7014 becomes an unordered compare and vice versa. */
7015 if (mode == CCFPmode)
7016 return reverse_condition_maybe_unordered (code);
7017 else
7018 return reverse_condition (code);
7019 }
7020
7021 /* Generate a compare for CODE. Return a brand-new rtx that
7022 represents the result of the compare. */
7023
7024 static rtx
7025 rs6000_generate_compare (code)
7026 enum rtx_code code;
7027 {
7028 enum machine_mode comp_mode;
7029 rtx compare_result;
7030
7031 if (rs6000_compare_fp_p)
7032 comp_mode = CCFPmode;
7033 else if (code == GTU || code == LTU
7034 || code == GEU || code == LEU)
7035 comp_mode = CCUNSmode;
7036 else
7037 comp_mode = CCmode;
7038
7039 /* First, the compare. */
7040 compare_result = gen_reg_rtx (comp_mode);
7041 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
7042 gen_rtx_COMPARE (comp_mode,
7043 rs6000_compare_op0,
7044 rs6000_compare_op1)));
7045
7046 /* Some kinds of FP comparisons need an OR operation;
7047 except for flag_unsafe_math_optimizations we don't bother. */
7048 if (rs6000_compare_fp_p
7049 && ! flag_unsafe_math_optimizations
7050 && (code == LE || code == GE
7051 || code == UNEQ || code == LTGT
7052 || code == UNGT || code == UNLT))
7053 {
7054 enum rtx_code or1, or2;
7055 rtx or1_rtx, or2_rtx, compare2_rtx;
7056 rtx or_result = gen_reg_rtx (CCEQmode);
7057
7058 switch (code)
7059 {
7060 case LE: or1 = LT; or2 = EQ; break;
7061 case GE: or1 = GT; or2 = EQ; break;
7062 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
7063 case LTGT: or1 = LT; or2 = GT; break;
7064 case UNGT: or1 = UNORDERED; or2 = GT; break;
7065 case UNLT: or1 = UNORDERED; or2 = LT; break;
7066 default: abort ();
7067 }
7068 validate_condition_mode (or1, comp_mode);
7069 validate_condition_mode (or2, comp_mode);
7070 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
7071 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
7072 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
7073 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
7074 const_true_rtx);
7075 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
7076
7077 compare_result = or_result;
7078 code = EQ;
7079 }
7080
7081 validate_condition_mode (code, GET_MODE (compare_result));
7082
7083 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
7084 }
7085
7086
7087 /* Emit the RTL for an sCOND pattern. */
7088
7089 void
7090 rs6000_emit_sCOND (code, result)
7091 enum rtx_code code;
7092 rtx result;
7093 {
7094 rtx condition_rtx;
7095 enum machine_mode op_mode;
7096
7097 condition_rtx = rs6000_generate_compare (code);
7098
7099 op_mode = GET_MODE (rs6000_compare_op0);
7100 if (op_mode == VOIDmode)
7101 op_mode = GET_MODE (rs6000_compare_op1);
7102
7103 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
7104 {
7105 PUT_MODE (condition_rtx, DImode);
7106 convert_move (result, condition_rtx, 0);
7107 }
7108 else
7109 {
7110 PUT_MODE (condition_rtx, SImode);
7111 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
7112 }
7113 }
7114
7115 /* Emit a branch of kind CODE to location LOC. */
7116
7117 void
7118 rs6000_emit_cbranch (code, loc)
7119 enum rtx_code code;
7120 rtx loc;
7121 {
7122 rtx condition_rtx, loc_ref;
7123
7124 condition_rtx = rs6000_generate_compare (code);
7125 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
7126 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
7127 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
7128 loc_ref, pc_rtx)));
7129 }
7130
7131 /* Return the string to output a conditional branch to LABEL, which is
7132 the operand number of the label, or -1 if the branch is really a
7133 conditional return.
7134
7135 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
7136 condition code register and its mode specifies what kind of
7137 comparison we made.
7138
7139 REVERSED is non-zero if we should reverse the sense of the comparison.
7140
7141 INSN is the insn. */
7142
char *
output_cbranch (op, label, reversed, insn)
     rtx op;
     const char * label;
     int reversed;
     rtx insn;
{
  /* NOTE(review): result points into a static buffer, so it is only
     valid until the next call.  */
  static char string[64];
  enum rtx_code code = GET_CODE (op);
  rtx cc_reg = XEXP (op, 0);
  enum machine_mode mode = GET_MODE (cc_reg);
  /* Index of the CR field being tested.  */
  int cc_regno = REGNO (cc_reg) - CR0_REGNO;
  /* An insn length of 8 means the conditional branch can't reach its
     target directly; we emit an inverted hop over an unconditional b.  */
  int need_longbranch = label != NULL && get_attr_length (insn) == 8;
  int really_reversed = reversed ^ need_longbranch;
  char *s = string;
  const char *ccode;
  const char *pred;
  rtx note;

  validate_condition_mode (code, mode);

  /* Work out which way this really branches.  We could use
     reverse_condition_maybe_unordered here always but this
     makes the resulting assembler clearer.  */
  if (really_reversed)
    code = rs6000_reverse_condition (mode, code);

  /* Map the rtx comparison code onto the condition mnemonic suffix.  */
  switch (code)
    {
      /* Not all of these are actually distinct opcodes, but
	 we distinguish them for clarity of the resulting assembler.  */
    case NE: case LTGT:
      ccode = "ne"; break;
    case EQ: case UNEQ:
      ccode = "eq"; break;
    case GE: case GEU: 
      ccode = "ge"; break;
    case GT: case GTU: case UNGT:
      ccode = "gt"; break;
    case LE: case LEU:
      ccode = "le"; break;
    case LT: case LTU: case UNLT:
      ccode = "lt"; break;
    case UNORDERED: ccode = "un"; break;
    case ORDERED: ccode = "nu"; break;
    case UNGE: ccode = "nl"; break;
    case UNLE: ccode = "ng"; break;
    default:
      abort ();
    }
  
  /* Maybe we have a guess as to how likely the branch is.  
     The old mnemonics don't have a way to specify this information.  */
  note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
  if (note != NULL_RTX)
    {
      /* PROB is the difference from 50%.  */
      int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
      
      /* For branches that are very close to 50%, assume not-taken.
	 Otherwise emit a "+" (predict taken) or "-" (predict not taken)
	 hint, flipped when the long-branch inversion is in effect.  */
      if (abs (prob) > REG_BR_PROB_BASE / 20
	  && ((prob > 0) ^ need_longbranch))
	pred = "+";
      else
	pred = "-";
    }
  else
    pred = "";

  /* The {old|new} braces choose between POWER and PowerPC mnemonics
     in the template; a NULL label means a conditional return.  */
  if (label == NULL)
    s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
  else
    s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);

  /* We need to escape any '%' characters in the reg_names string.
     Assume they'd only be the first character...  */
  if (reg_names[cc_regno + CR0_REGNO][0] == '%')
    *s++ = '%';
  s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);

  if (label != NULL)
    {
      /* If the branch distance was too far, we may have to use an
	 unconditional branch to go the distance.  */
      if (need_longbranch)
	s += sprintf (s, ",$+8\n\tb %s", label);
      else
	s += sprintf (s, ",%s", label);
    }

  return string;
}
7235
7236 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
7237 operands of the last comparison is nonzero/true, FALSE_COND if it
7238 is zero/false. Return 0 if the hardware has no such operation. */
7239
7240 int
7241 rs6000_emit_cmove (dest, op, true_cond, false_cond)
7242 rtx dest;
7243 rtx op;
7244 rtx true_cond;
7245 rtx false_cond;
7246 {
7247 enum rtx_code code = GET_CODE (op);
7248 rtx op0 = rs6000_compare_op0;
7249 rtx op1 = rs6000_compare_op1;
7250 REAL_VALUE_TYPE c1;
7251 enum machine_mode mode = GET_MODE (op0);
7252 rtx temp;
7253
7254 /* First, work out if the hardware can do this at all, or
7255 if it's too slow... */
7256 /* If the comparison is an integer one, since we only have fsel
7257 it'll be cheaper to use a branch. */
7258 if (! rs6000_compare_fp_p)
7259 return 0;
7260
7261 /* Eliminate half of the comparisons by switching operands, this
7262 makes the remaining code simpler. */
7263 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
7264 || code == LTGT || code == LT)
7265 {
7266 code = reverse_condition_maybe_unordered (code);
7267 temp = true_cond;
7268 true_cond = false_cond;
7269 false_cond = temp;
7270 }
7271
7272 /* UNEQ and LTGT take four instructions for a comparison with zero,
7273 it'll probably be faster to use a branch here too. */
7274 if (code == UNEQ)
7275 return 0;
7276
7277 if (GET_CODE (op1) == CONST_DOUBLE)
7278 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
7279
7280 /* We're going to try to implement comparions by performing
7281 a subtract, then comparing against zero. Unfortunately,
7282 Inf - Inf is NaN which is not zero, and so if we don't
7283 know that the the operand is finite and the comparison
7284 would treat EQ different to UNORDERED, we can't do it. */
7285 if (! flag_unsafe_math_optimizations
7286 && code != GT && code != UNGE
7287 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
7288 /* Constructs of the form (a OP b ? a : b) are safe. */
7289 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
7290 || (! rtx_equal_p (op0, true_cond)
7291 && ! rtx_equal_p (op1, true_cond))))
7292 return 0;
7293 /* At this point we know we can use fsel. */
7294
7295 /* Reduce the comparison to a comparison against zero. */
7296 temp = gen_reg_rtx (mode);
7297 emit_insn (gen_rtx_SET (VOIDmode, temp,
7298 gen_rtx_MINUS (mode, op0, op1)));
7299 op0 = temp;
7300 op1 = CONST0_RTX (mode);
7301
7302 /* If we don't care about NaNs we can reduce some of the comparisons
7303 down to faster ones. */
7304 if (flag_unsafe_math_optimizations)
7305 switch (code)
7306 {
7307 case GT:
7308 code = LE;
7309 temp = true_cond;
7310 true_cond = false_cond;
7311 false_cond = temp;
7312 break;
7313 case UNGE:
7314 code = GE;
7315 break;
7316 case UNEQ:
7317 code = EQ;
7318 break;
7319 default:
7320 break;
7321 }
7322
7323 /* Now, reduce everything down to a GE. */
7324 switch (code)
7325 {
7326 case GE:
7327 break;
7328
7329 case LE:
7330 temp = gen_reg_rtx (mode);
7331 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7332 op0 = temp;
7333 break;
7334
7335 case ORDERED:
7336 temp = gen_reg_rtx (mode);
7337 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (mode, op0)));
7338 op0 = temp;
7339 break;
7340
7341 case EQ:
7342 temp = gen_reg_rtx (mode);
7343 emit_insn (gen_rtx_SET (VOIDmode, temp,
7344 gen_rtx_NEG (mode,
7345 gen_rtx_ABS (mode, op0))));
7346 op0 = temp;
7347 break;
7348
7349 case UNGE:
7350 temp = gen_reg_rtx (mode);
7351 emit_insn (gen_rtx_SET (VOIDmode, temp,
7352 gen_rtx_IF_THEN_ELSE (mode,
7353 gen_rtx_GE (VOIDmode,
7354 op0, op1),
7355 true_cond, false_cond)));
7356 false_cond = temp;
7357 true_cond = false_cond;
7358
7359 temp = gen_reg_rtx (mode);
7360 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7361 op0 = temp;
7362 break;
7363
7364 case GT:
7365 temp = gen_reg_rtx (mode);
7366 emit_insn (gen_rtx_SET (VOIDmode, temp,
7367 gen_rtx_IF_THEN_ELSE (mode,
7368 gen_rtx_GE (VOIDmode,
7369 op0, op1),
7370 true_cond, false_cond)));
7371 true_cond = temp;
7372 false_cond = true_cond;
7373
7374 temp = gen_reg_rtx (mode);
7375 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (mode, op0)));
7376 op0 = temp;
7377 break;
7378
7379 default:
7380 abort ();
7381 }
7382
7383 emit_insn (gen_rtx_SET (VOIDmode, dest,
7384 gen_rtx_IF_THEN_ELSE (GET_MODE (dest),
7385 gen_rtx_GE (VOIDmode,
7386 op0, op1),
7387 true_cond, false_cond)));
7388 return 1;
7389 }
7390
7391 void
7392 rs6000_emit_minmax (dest, code, op0, op1)
7393 rtx dest;
7394 enum rtx_code code;
7395 rtx op0;
7396 rtx op1;
7397 {
7398 enum machine_mode mode = GET_MODE (op0);
7399 rtx target;
7400 if (code == SMAX || code == UMAX)
7401 target = emit_conditional_move (dest, GE, op0, op1, mode,
7402 op0, op1, mode, 0);
7403 else
7404 target = emit_conditional_move (dest, GE, op0, op1, mode,
7405 op1, op0, mode, 0);
7406 if (target == NULL_RTX)
7407 abort ();
7408 if (target != dest)
7409 emit_move_insn (dest, target);
7410 }
7411 \f
7412 /* This page contains routines that are used to determine what the
7413 function prologue and epilogue code will do and write them out. */
7414
7415 /* Return the first fixed-point register that is required to be
7416 saved. 32 if none. */
7417
int
first_reg_to_save ()
{
  int first_reg;

  /* Find lowest numbered live register.  Call-used registers normally
     don't need saving, except for the PIC register under the ABIs
     where we own it (V4 with -fpic, Darwin with any PIC level).  */
  for (first_reg = 13; first_reg <= 31; first_reg++)
    if (regs_ever_live[first_reg] 
	&& (! call_used_regs[first_reg]
	    || (first_reg == PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
      break;

  if (current_function_profile)
    {
      /* AIX must save/restore every register that contains a parameter
	 before/after the .__mcount call plus an additional register
	 for the static chain, if needed; use registers from 30 down to 22
	 to do this.  */
      if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
	{
	  int last_parm_reg, profile_first_reg;

	  /* Figure out last used parameter register.  The proper thing
	     to do is to walk incoming args of the function.  A function
	     might have live parameter registers even if it has no
	     incoming args.  */
	  for (last_parm_reg = 10;
	       last_parm_reg > 2 && ! regs_ever_live [last_parm_reg];
	       last_parm_reg--)
	    ;

	  /* Calculate first reg for saving parameter registers
	     and static chain.
	     Skip reg 31 which may contain the frame pointer.  */
	  profile_first_reg = (33 - last_parm_reg
			       - (current_function_needs_context ? 1 : 0));
#if TARGET_MACHO
	  /* Need to skip another reg to account for R31 being PICBASE
	     (when flag_pic is set) or R30 being used as the frame
	     pointer (when flag_pic is not set).  */
	  --profile_first_reg;
#endif
	  /* Do not save the frame pointer if no parameters need to be
	     saved.  */
	  if (profile_first_reg == 31)
	    profile_first_reg = 32;

	  /* Widen the save range if profiling needs more registers
	     than the live-register scan found.  */
	  if (first_reg > profile_first_reg)
	    first_reg = profile_first_reg;
	}

      /* SVR4 may need one register to preserve the static chain.  */
      else if (current_function_needs_context)
	{
	  /* Skip reg 31 which may contain the frame pointer.  */
	  if (first_reg > 30)
	    first_reg = 30;
	}
    }

#if TARGET_MACHO
  /* The PIC base register must also be saved when it is in use.  */
  if (flag_pic && current_function_uses_pic_offset_table &&
      (first_reg > PIC_OFFSET_TABLE_REGNUM))
    return PIC_OFFSET_TABLE_REGNUM;
#endif

  return first_reg;
}
7487
7488 /* Similar, for FP regs. */
7489
7490 int
7491 first_fp_reg_to_save ()
7492 {
7493 int first_reg;
7494
7495 /* Find lowest numbered live register. */
7496 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
7497 if (regs_ever_live[first_reg])
7498 break;
7499
7500 return first_reg;
7501 }
7502
7503 /* Similar, for AltiVec regs. */
7504
7505 static int
7506 first_altivec_reg_to_save ()
7507 {
7508 int i;
7509
7510 /* Stack frame remains as is unless we are in AltiVec ABI. */
7511 if (! TARGET_ALTIVEC_ABI)
7512 return LAST_ALTIVEC_REGNO + 1;
7513
7514 /* Find lowest numbered live register. */
7515 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
7516 if (regs_ever_live[i])
7517 break;
7518
7519 return i;
7520 }
7521
7522 /* Return a 32-bit mask of the AltiVec registers we need to set in
7523 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
7524 the 32-bit word is 0. */
7525
7526 static unsigned int
7527 compute_vrsave_mask ()
7528 {
7529 unsigned int i, mask = 0;
7530
7531 /* First, find out if we use _any_ altivec registers. */
7532 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7533 if (regs_ever_live[i])
7534 mask |= ALTIVEC_REG_BIT (i);
7535
7536 if (mask == 0)
7537 return mask;
7538
7539 /* Next, add all registers that are call-clobbered. We do this
7540 because post-reload register optimizers such as regrename_optimize
7541 may choose to use them. They never change the register class
7542 chosen by reload, so cannot create new uses of altivec registers
7543 if there were none before, so the early exit above is safe. */
7544 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
7545 altivec registers not saved in the mask, which might well make the
7546 adjustments below more effective in eliding the save/restore of
7547 VRSAVE in small functions. */
7548 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7549 if (call_used_regs[i])
7550 mask |= ALTIVEC_REG_BIT (i);
7551
7552 /* Next, remove the argument registers from the set. These must
7553 be in the VRSAVE mask set by the caller, so we don't need to add
7554 them in again. More importantly, the mask we compute here is
7555 used to generate CLOBBERs in the set_vrsave insn, and we do not
7556 wish the argument registers to die. */
7557 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
7558 mask &= ~ALTIVEC_REG_BIT (i);
7559
7560 /* Similarly, remove the return value from the set. */
7561 {
7562 bool yes = false;
7563 diddle_return_value (is_altivec_return_reg, &yes);
7564 if (yes)
7565 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
7566 }
7567
7568 return mask;
7569 }
7570
7571 static void
7572 is_altivec_return_reg (reg, xyes)
7573 rtx reg;
7574 void *xyes;
7575 {
7576 bool *yes = (bool *) xyes;
7577 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
7578 *yes = true;
7579 }
7580
7581 \f
7582 /* Calculate the stack information for the current function. This is
7583 complicated by having two separate calling sequences, the AIX calling
7584 sequence and the V.4 calling sequence.
7585
7586 AIX (and Darwin/Mac OS X) stack frames look like:
7587 32-bit 64-bit
7588 SP----> +---------------------------------------+
7589 | back chain to caller | 0 0
7590 +---------------------------------------+
7591 | saved CR | 4 8 (8-11)
7592 +---------------------------------------+
7593 | saved LR | 8 16
7594 +---------------------------------------+
7595 | reserved for compilers | 12 24
7596 +---------------------------------------+
7597 | reserved for binders | 16 32
7598 +---------------------------------------+
7599 | saved TOC pointer | 20 40
7600 +---------------------------------------+
7601 | Parameter save area (P) | 24 48
7602 +---------------------------------------+
7603 | Alloca space (A) | 24+P etc.
7604 +---------------------------------------+
7605 | Local variable space (L) | 24+P+A
7606 +---------------------------------------+
7607 | Float/int conversion temporary (X) | 24+P+A+L
7608 +---------------------------------------+
7609 | Save area for AltiVec registers (W) | 24+P+A+L+X
7610 +---------------------------------------+
7611 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
7612 +---------------------------------------+
7613 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
7614 +---------------------------------------+
	| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
	+---------------------------------------+
	| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
7618 +---------------------------------------+
7619 old SP->| back chain to caller's caller |
7620 +---------------------------------------+
7621
7622 The required alignment for AIX configurations is two words (i.e., 8
7623 or 16 bytes).
7624
7625
7626 V.4 stack frames look like:
7627
7628 SP----> +---------------------------------------+
7629 | back chain to caller | 0
7630 +---------------------------------------+
7631 | caller's saved LR | 4
7632 +---------------------------------------+
7633 | Parameter save area (P) | 8
7634 +---------------------------------------+
7635 | Alloca space (A) | 8+P
7636 +---------------------------------------+
7637 | Varargs save area (V) | 8+P+A
7638 +---------------------------------------+
7639 | Local variable space (L) | 8+P+A+V
7640 +---------------------------------------+
7641 | Float/int conversion temporary (X) | 8+P+A+V+L
7642 +---------------------------------------+
7643 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
7644 +---------------------------------------+
7645 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
7646 +---------------------------------------+
7647 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
7648 +---------------------------------------+
7649 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
7650 +---------------------------------------+
7651 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
7652 +---------------------------------------+
7653 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
7654 +---------------------------------------+
7655 old SP->| back chain to caller's caller |
7656 +---------------------------------------+
7657
7658 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7659 given. (But note below and in sysv4.h that we require only 8 and
7660 may round up the size of our stack frame anyways. The historical
7661 reason is early versions of powerpc-linux which didn't properly
7662 align the stack at program startup. A happy side-effect is that
7663 -mno-eabi libraries can be used with -meabi programs.)
7664
7665 The EABI configuration defaults to the V.4 layout, unless
7666 -mcall-aix is used, in which case the AIX layout is used. However,
7667 the stack alignment requirements may differ. If -mno-eabi is not
7668 given, the required stack alignment is 8 bytes; if -mno-eabi is
7669 given, the required alignment is 16 bytes. (But see V.4 comment
7670 above.) */
7671
7672 #ifndef ABI_STACK_BOUNDARY
7673 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
7674 #endif
7675
7676 rs6000_stack_t *
7677 rs6000_stack_info ()
7678 {
7679 static rs6000_stack_t info, zero_info;
7680 rs6000_stack_t *info_ptr = &info;
7681 int reg_size = TARGET_POWERPC64 ? 8 : 4;
7682 enum rs6000_abi abi;
7683 int ehrd_size;
7684 int total_raw_size;
7685
7686 /* Zero all fields portably. */
7687 info = zero_info;
7688
7689 /* Select which calling sequence. */
7690 info_ptr->abi = abi = DEFAULT_ABI;
7691
7692 /* Calculate which registers need to be saved & save area size. */
7693 info_ptr->first_gp_reg_save = first_reg_to_save ();
7694 /* Assume that we will have to save PIC_OFFSET_TABLE_REGNUM,
7695 even if it currently looks like we won't. */
7696 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
7697 || (flag_pic == 1 && abi == ABI_V4)
7698 || (flag_pic && abi == ABI_DARWIN))
7699 && info_ptr->first_gp_reg_save > PIC_OFFSET_TABLE_REGNUM)
7700 info_ptr->gp_size = reg_size * (32 - PIC_OFFSET_TABLE_REGNUM);
7701 else
7702 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
7703
7704 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
7705 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
7706
7707 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
7708 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
7709 - info_ptr->first_altivec_reg_save);
7710
7711 /* Does this function call anything? */
7712 info_ptr->calls_p = (! current_function_is_leaf
7713 || cfun->machine->ra_needs_full_frame);
7714
7715 /* Determine if we need to save the link register. */
7716 if (rs6000_ra_ever_killed ()
7717 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
7718 #ifdef TARGET_RELOCATABLE
7719 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
7720 #endif
7721 || (info_ptr->first_fp_reg_save != 64
7722 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
7723 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
7724 || (abi == ABI_V4 && current_function_calls_alloca)
7725 || (DEFAULT_ABI == ABI_DARWIN
7726 && flag_pic
7727 && current_function_uses_pic_offset_table)
7728 || info_ptr->calls_p)
7729 {
7730 info_ptr->lr_save_p = 1;
7731 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
7732 }
7733
7734 /* Determine if we need to save the condition code registers. */
7735 if (regs_ever_live[CR2_REGNO]
7736 || regs_ever_live[CR3_REGNO]
7737 || regs_ever_live[CR4_REGNO])
7738 {
7739 info_ptr->cr_save_p = 1;
7740 if (abi == ABI_V4)
7741 info_ptr->cr_size = reg_size;
7742 }
7743
7744 /* If the current function calls __builtin_eh_return, then we need
7745 to allocate stack space for registers that will hold data for
7746 the exception handler. */
7747 if (current_function_calls_eh_return)
7748 {
7749 unsigned int i;
7750 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
7751 continue;
7752 ehrd_size = i * UNITS_PER_WORD;
7753 }
7754 else
7755 ehrd_size = 0;
7756
7757 /* Determine various sizes. */
7758 info_ptr->reg_size = reg_size;
7759 info_ptr->fixed_size = RS6000_SAVE_AREA;
7760 info_ptr->varargs_size = RS6000_VARARGS_AREA;
7761 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
7762 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
7763 8);
7764
7765 if (TARGET_ALTIVEC_ABI)
7766 {
7767 info_ptr->vrsave_mask = compute_vrsave_mask ();
7768 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
7769 }
7770 else
7771 {
7772 info_ptr->vrsave_mask = 0;
7773 info_ptr->vrsave_size = 0;
7774 }
7775
7776 /* Calculate the offsets. */
7777 switch (abi)
7778 {
7779 case ABI_NONE:
7780 default:
7781 abort ();
7782
7783 case ABI_AIX:
7784 case ABI_AIX_NODESC:
7785 case ABI_DARWIN:
7786 info_ptr->fp_save_offset = - info_ptr->fp_size;
7787 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7788
7789 if (TARGET_ALTIVEC_ABI)
7790 {
7791 info_ptr->vrsave_save_offset
7792 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
7793
7794 /* Align stack so vector save area is on a quadword boundary. */
7795 if (info_ptr->altivec_size != 0)
7796 info_ptr->altivec_padding_size
7797 = 16 - (-info_ptr->vrsave_save_offset % 16);
7798 else
7799 info_ptr->altivec_padding_size = 0;
7800
7801 info_ptr->altivec_save_offset
7802 = info_ptr->vrsave_save_offset
7803 - info_ptr->altivec_padding_size
7804 - info_ptr->altivec_size;
7805
7806 /* Adjust for AltiVec case. */
7807 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
7808 }
7809 else
7810 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
7811 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
7812 info_ptr->lr_save_offset = 2*reg_size;
7813 break;
7814
7815 case ABI_V4:
7816 info_ptr->fp_save_offset = - info_ptr->fp_size;
7817 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7818 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
7819
7820 if (TARGET_ALTIVEC_ABI)
7821 {
7822 info_ptr->vrsave_save_offset
7823 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
7824
7825 /* Align stack so vector save area is on a quadword boundary. */
7826 if (info_ptr->altivec_size != 0)
7827 info_ptr->altivec_padding_size
7828 = 16 - (-info_ptr->vrsave_save_offset % 16);
7829 else
7830 info_ptr->altivec_padding_size = 0;
7831
7832 info_ptr->altivec_save_offset
7833 = info_ptr->vrsave_save_offset
7834 - info_ptr->altivec_padding_size
7835 - info_ptr->altivec_size;
7836
7837 /* Adjust for AltiVec case. */
7838 info_ptr->toc_save_offset
7839 = info_ptr->altivec_save_offset - info_ptr->toc_size;
7840 }
7841 else
7842 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
7843 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
7844 info_ptr->lr_save_offset = reg_size;
7845 break;
7846 }
7847
7848 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
7849 + info_ptr->gp_size
7850 + info_ptr->altivec_size
7851 + info_ptr->altivec_padding_size
7852 + info_ptr->vrsave_size
7853 + ehrd_size
7854 + info_ptr->cr_size
7855 + info_ptr->lr_size
7856 + info_ptr->vrsave_size
7857 + info_ptr->toc_size,
7858 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
7859 ? 16 : 8);
7860
7861 total_raw_size = (info_ptr->vars_size
7862 + info_ptr->parm_size
7863 + info_ptr->save_size
7864 + info_ptr->varargs_size
7865 + info_ptr->fixed_size);
7866
7867 info_ptr->total_size =
7868 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
7869
7870 /* Determine if we need to allocate any stack frame:
7871
7872 For AIX we need to push the stack if a frame pointer is needed
7873 (because the stack might be dynamically adjusted), if we are
7874 debugging, if we make calls, or if the sum of fp_save, gp_save,
7875 and local variables are more than the space needed to save all
7876 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
7877 + 18*8 = 288 (GPR13 reserved).
7878
7879 For V.4 we don't have the stack cushion that AIX uses, but assume
7880 that the debugger can handle stackless frames. */
7881
7882 if (info_ptr->calls_p)
7883 info_ptr->push_p = 1;
7884
7885 else if (abi == ABI_V4)
7886 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
7887
7888 else
7889 info_ptr->push_p = (frame_pointer_needed
7890 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
7891 || ((total_raw_size - info_ptr->fixed_size)
7892 > (TARGET_32BIT ? 220 : 288)));
7893
7894 /* Zero offsets if we're not saving those registers. */
7895 if (info_ptr->fp_size == 0)
7896 info_ptr->fp_save_offset = 0;
7897
7898 if (info_ptr->gp_size == 0)
7899 info_ptr->gp_save_offset = 0;
7900
7901 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
7902 info_ptr->altivec_save_offset = 0;
7903
7904 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
7905 info_ptr->vrsave_save_offset = 0;
7906
7907 if (! info_ptr->lr_save_p)
7908 info_ptr->lr_save_offset = 0;
7909
7910 if (! info_ptr->cr_save_p)
7911 info_ptr->cr_save_offset = 0;
7912
7913 if (! info_ptr->toc_save_p)
7914 info_ptr->toc_save_offset = 0;
7915
7916 return info_ptr;
7917 }
7918
/* Dump a human-readable description of the frame layout INFO to
   stderr.  If INFO is NULL, compute the current function's stack
   information first.  Fields that are still at their default values
   (zero sizes/offsets, no registers to save) are omitted to keep the
   dump short.  Called when TARGET_DEBUG_STACK is set.  */
void
debug_stack_info (info)
     rs6000_stack_t *info;
{
  const char *abi_string;

  if (! info)
    info = rs6000_stack_info ();

  fprintf (stderr, "\nStack information for function %s:\n",
	   ((current_function_decl && DECL_NAME (current_function_decl))
	    ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
	    : "<unknown>"));

  /* Translate the internal ABI enumerator into a printable name.  */
  switch (info->abi)
    {
    default:		 abi_string = "Unknown";	break;
    case ABI_NONE:	 abi_string = "NONE";		break;
    case ABI_AIX:
    case ABI_AIX_NODESC: abi_string = "AIX";		break;
    case ABI_DARWIN:	 abi_string = "Darwin";		break;
    case ABI_V4:	 abi_string = "V.4";		break;
    }

  fprintf (stderr, "\tABI = %5s\n", abi_string);

  if (TARGET_ALTIVEC_ABI)
    fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");

  /* 32 means "no GPR needs saving"; likewise 64 for FPRs below.  */
  if (info->first_gp_reg_save != 32)
    fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);

  if (info->first_fp_reg_save != 64)
    fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);

  if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
    fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
	     info->first_altivec_reg_save);

  if (info->lr_save_p)
    fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);

  if (info->cr_save_p)
    fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);

  if (info->toc_save_p)
    fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);

  if (info->vrsave_mask)
    fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);

  if (info->push_p)
    fprintf (stderr, "\tpush_p = %5d\n", info->push_p);

  if (info->calls_p)
    fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);

  /* Save-area offsets, relative to the frame.  */
  if (info->gp_save_offset)
    fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);

  if (info->fp_save_offset)
    fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);

  if (info->altivec_save_offset)
    fprintf (stderr, "\taltivec_save_offset = %5d\n",
	     info->altivec_save_offset);

  if (info->vrsave_save_offset)
    fprintf (stderr, "\tvrsave_save_offset = %5d\n",
	     info->vrsave_save_offset);

  if (info->lr_save_offset)
    fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);

  if (info->cr_save_offset)
    fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);

  if (info->toc_save_offset)
    fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);

  if (info->varargs_save_offset)
    fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);

  /* Sizes of the various frame components.  */
  if (info->total_size)
    fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);

  if (info->varargs_size)
    fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);

  if (info->vars_size)
    fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);

  if (info->parm_size)
    fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);

  if (info->fixed_size)
    fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);

  if (info->gp_size)
    fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);

  if (info->fp_size)
    fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);

  if (info->altivec_size)
    fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);

  if (info->vrsave_size)
    fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);

  if (info->altivec_padding_size)
    fprintf (stderr, "\taltivec_padding_size= %5d\n",
	     info->altivec_padding_size);

  if (info->lr_size)
    fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);

  if (info->cr_size)
    fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);

  if (info->toc_size)
    fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);

  if (info->save_size)
    fprintf (stderr, "\tsave_size = %5d\n", info->save_size);

  if (info->reg_size != 4)
    fprintf (stderr, "\treg_size = %5d\n", info->reg_size);

  fprintf (stderr, "\n");
}
8050
/* Return an RTX giving the return address of the frame COUNT levels
   up from the current one; FRAME is the frame pointer of that frame.
   NOTE(review): presumably backs the RETURN_ADDR_RTX macro for
   __builtin_return_address — confirm in rs6000.h.  */
rtx
rs6000_return_addr (count, frame)
     int count;
     rtx frame;
{
  /* Currently we don't optimize very well between prolog and body
     code and for PIC code the code can be actually quite bad, so
     don't try to be too clever here.  */
  if (count != 0
      || flag_pic != 0
      || DEFAULT_ABI == ABI_AIX
      || DEFAULT_ABI == ABI_AIX_NODESC)
    {
      /* Force a full frame so the return address is actually stored
	 on the stack; then load it from the LR save slot, found at
	 RETURN_ADDRESS_OFFSET past the back-chain pointer in FRAME.  */
      cfun->machine->ra_needs_full_frame = 1;

      return
	gen_rtx_MEM
	  (Pmode,
	   memory_address
	   (Pmode,
	    plus_constant (copy_to_reg
			   (gen_rtx_MEM (Pmode,
					 memory_address (Pmode, frame))),
			   RETURN_ADDRESS_OFFSET)));
    }

  /* Otherwise use the value LR held on entry to the function.  */
  return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
}
8079
/* Return nonzero if the link register may be clobbered somewhere in
   the current function, i.e. whether LR needs to be saved.  */
static int
rs6000_ra_ever_killed ()
{
  rtx top;

#ifdef ASM_OUTPUT_MI_THUNK
  /* Thunks are emitted by hand and never clobber LR.  */
  if (current_function_is_thunk)
    return 0;
#endif
  /* If we never captured LR's incoming value, or a full frame is
     already required, fall back on the conservative liveness bit.  */
  if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
      || cfun->machine->ra_needs_full_frame)
    return regs_ever_live[LINK_REGISTER_REGNUM];

  /* Otherwise scan the whole (topmost) insn chain for a set of LR.  */
  push_topmost_sequence ();
  top = get_insns ();
  pop_topmost_sequence ();

  return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
			    top, NULL_RTX);
}
8100 \f
8101 /* Add a REG_MAYBE_DEAD note to the insn. */
8102 static void
8103 rs6000_maybe_dead (insn)
8104 rtx insn;
8105 {
8106 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
8107 const0_rtx,
8108 REG_NOTES (insn));
8109 }
8110
/* Emit instructions needed to load the TOC register.
   This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
   a constant pool; or for SVR4 -fpic.

   FROMPROLOG is nonzero when called while emitting the prologue, in
   which case the hard registers LR and r0 are used as temporaries
   instead of fresh pseudos.  */

void
rs6000_emit_load_toc_table (fromprolog)
     int fromprolog;
{
  rtx dest;
  dest = gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
    {
      /* SVR4 -fpic: a single load_toc_v4_pic_si does the job.  */
      if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
	{
	  rtx temp = (fromprolog
		      ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
		      : gen_reg_rtx (Pmode));
	  rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
	  rs6000_maybe_dead (emit_move_insn (dest, temp));
	}
      else if (flag_pic == 2)
	{
	  /* -fPIC: compute the TOC pointer from the difference
	     between a local label and the TOC base symbol.  */
	  char buf[30];
	  rtx tempLR = (fromprolog
			? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
			: gen_reg_rtx (Pmode));
	  rtx temp0 = (fromprolog
		       ? gen_rtx_REG (Pmode, 0)
		       : gen_reg_rtx (Pmode));
	  rtx symF;

	  /* possibly create the toc section */
	  if (! toc_initialized)
	    {
	      toc_section ();
	      function_section (current_function_decl);
	    }

	  if (fromprolog)
	    {
	      /* In the prologue, use the per-function LCF/LCL label
	         pair (numbered by rs6000_pic_labelno).  */
	      rtx symL;

	      ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
	      symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	      ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
	      symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
								   symF)));
	      rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
								   symL,
								   symF)));
	    }
	  else
	    {
	      /* Reloading the TOC mid-function: use a fresh LCG label
	         and load the offset from memory at the TOC label.  */
	      rtx tocsym;
	      static int reload_toc_labelno = 0;

	      tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);

	      ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
	      symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	      rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
								    symF,
								    tocsym)));
	      rs6000_maybe_dead (emit_move_insn (dest, tempLR));
	      rs6000_maybe_dead (emit_move_insn (temp0,
						 gen_rtx_MEM (Pmode, dest)));
	    }
	  rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
	}
      else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
	{
	  /* This is for AIX code running in non-PIC ELF.  Materialize
	     the TOC label address with a lis/ori-style pair.  */
	  char buf[30];
	  rtx realsym;
	  ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
	  realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));

	  rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
	  rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
	}
      else
	abort ();
    }
  else
    {
      /* AIX (or Darwin): reload the TOC pointer from its dedicated
	 stack slot.  */
      if (TARGET_32BIT)
	rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
      else
	rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
    }
}
8208
/* Return the alias set used for TOC references, allocating it lazily
   the first time it is requested.  */
int
get_TOC_alias_set ()
{
  static int toc_alias_set = -1;

  if (toc_alias_set == -1)
    toc_alias_set = new_alias_set ();

  return toc_alias_set;
}
8217
8218 /* This retuns nonzero if the current function uses the TOC. This is
8219 determined by the presence of (unspec ... 7), which is generated by
8220 the various load_toc_* patterns. */
8221
8222 int
8223 uses_TOC ()
8224 {
8225 rtx insn;
8226
8227 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8228 if (INSN_P (insn))
8229 {
8230 rtx pat = PATTERN (insn);
8231 int i;
8232
8233 if (GET_CODE (pat) == PARALLEL)
8234 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
8235 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
8236 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
8237 return 1;
8238 }
8239 return 0;
8240 }
8241
8242 rtx
8243 create_TOC_reference (symbol)
8244 rtx symbol;
8245 {
8246 return gen_rtx_PLUS (Pmode,
8247 gen_rtx_REG (Pmode, TOC_REGISTER),
8248 gen_rtx_CONST (Pmode,
8249 gen_rtx_MINUS (Pmode, symbol,
8250 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
8251 }
8252
8253 #if TARGET_AIX
/* __throw will restore its own return address to be the same as the
   return address of the function that the throw is being made to.
   This is unfortunate, because we want to check the original
   return address to see if we need to restore the TOC.
   So we have to squirrel it away here.
   This is used only in compiling __throw and __rethrow.

   Most of this code should be removed by CSE.  */
/* Pseudo holding the opcode found at the caller's return address;
   written by rs6000_aix_emit_builtin_unwind_init and read by
   rs6000_emit_eh_toc_restore.  */
static rtx insn_after_throw;
8263
/* This does the saving: capture, at run time, the instruction found
   at the return address of the current frame's caller, and stash it
   in insn_after_throw for rs6000_emit_eh_toc_restore to examine.  */
void
rs6000_aix_emit_builtin_unwind_init ()
{
  rtx mem;
  rtx stack_top = gen_reg_rtx (Pmode);
  rtx opcode_addr = gen_reg_rtx (Pmode);

  insn_after_throw = gen_reg_rtx (SImode);

  /* Follow the back chain to the caller's frame.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  emit_move_insn (stack_top, mem);

  /* The return address lives two words up that frame (the LR save
     slot under the AIX ABI — NOTE(review): offset matches the one
     used in rs6000_emit_eh_toc_restore).  */
  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, stack_top,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  /* Fetch the 32-bit instruction located at that return address.  */
  emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
}
8283
/* Emit insns to _restore_ the TOC register, at runtime (specifically
   in _eh.o).  Only used on AIX.

   The idea is that on AIX, function calls look like this:
	bl  somefunction-trampoline
	lwz r2,20(sp)

   and later,
	somefunction-trampoline:
	stw r2,20(sp)
	 ... load function address in the count register ...
	bctr
   or like this, if the linker determines that this is not a cross-module call
   and so the TOC need not be restored:
	bl somefunction
	nop
   or like this, if the compiler could determine that this is not a
   cross-module call:
	bl somefunction
   now, the tricky bit here is that register 2 is saved and restored
   by the _linker_, so we can't readily generate debugging information
   for it.  So we need to go back up the call chain looking at the
   insns at return addresses to see which calls saved the TOC register
   and so see where it gets restored from.

   Oh, and all this gets done in RTL inside the eh_epilogue pattern,
   just before the actual epilogue.

   On the bright side, this incurs no space or time overhead unless an
   exception is thrown, except for the extra code in libgcc.a.

   The parameter STACKSIZE is a register containing (at runtime)
   the amount to be popped off the stack in addition to the stack frame
   of this routine (which will be __throw or __rethrow, and so is
   guaranteed to have a stack frame).  */

void
rs6000_emit_eh_toc_restore (stacksize)
     rtx stacksize;
{
  rtx top_of_stack;
  rtx bottom_of_stack = gen_reg_rtx (Pmode);
  rtx tocompare = gen_reg_rtx (SImode);
  rtx opcode = gen_reg_rtx (SImode);
  rtx opcode_addr = gen_reg_rtx (Pmode);
  rtx mem;
  rtx loop_start = gen_label_rtx ();
  rtx no_toc_restore_needed = gen_label_rtx ();
  rtx loop_exit = gen_label_rtx ();

  /* Start the walk at our own frame's back chain.  */
  mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  top_of_stack = expand_binop (Pmode, add_optab,
			       bottom_of_stack, stacksize,
			       NULL_RTX, 1, OPTAB_WIDEN);

  /* The TOC-restore opcode emitted after a cross-module call:
     0x80410014 is "lwz r2,20(r1)" (32-bit), 0xE8410028 is
     "ld r2,40(r1)" (64-bit).  */
  emit_move_insn (tocompare,
		  GEN_INT (trunc_int_for_mode (TARGET_32BIT
					       ? 0x80410014
					       : 0xE8410028, SImode)));

  if (insn_after_throw == NULL_RTX)
    abort ();
  emit_move_insn (opcode, insn_after_throw);

  emit_note (NULL, NOTE_INSN_LOOP_BEG);
  emit_label (loop_start);

  /* If the opcode at the return address is the TOC restore, reload
     r2 from this frame's TOC save slot.  */
  do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
			   SImode, NULL_RTX, NULL_RTX,
			   no_toc_restore_needed);

  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (5 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (gen_rtx_REG (Pmode, 2), mem);

  emit_label (no_toc_restore_needed);
  /* Stop once we have walked past all frames being popped.  */
  do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
			   Pmode, NULL_RTX, NULL_RTX,
			   loop_exit);

  /* Advance to the next frame up the chain and fetch the opcode at
     its return address for the next iteration.  */
  mem = gen_rtx_MEM (Pmode, bottom_of_stack);
  set_mem_alias_set (mem, rs6000_sr_alias_set);
  emit_move_insn (bottom_of_stack, mem);

  mem = gen_rtx_MEM (Pmode,
		     gen_rtx_PLUS (Pmode, bottom_of_stack,
				   GEN_INT (2 * GET_MODE_SIZE (Pmode))));
  emit_move_insn (opcode_addr, mem);
  emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_jump (loop_start);
  emit_note (NULL, NOTE_INSN_LOOP_END);
  emit_label (loop_exit);
}
8383 #endif /* TARGET_AIX */
8384 \f
8385 /* This ties together stack memory (MEM with an alias set of
8386 rs6000_sr_alias_set) and the change to the stack pointer. */
8387
8388 static void
8389 rs6000_emit_stack_tie ()
8390 {
8391 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
8392
8393 set_mem_alias_set (mem, rs6000_sr_alias_set);
8394 emit_insn (gen_stack_tie (mem));
8395 }
8396
/* Emit the correct code for allocating stack space, as insns.
   If COPY_R12, make sure a copy of the old frame is left in r12.
   The generated code may use hard register 0 as a temporary.  */

static void
rs6000_emit_allocate_stack (size, copy_r12)
     HOST_WIDE_INT size;
     int copy_r12;
{
  rtx insn;
  rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx tmp_reg = gen_rtx_REG (Pmode, 0);
  rtx todec = GEN_INT (-size);

  /* -fstack-limit support: trap before the decrement if the new
     stack pointer would fall below the limit.  */
  if (current_function_limit_stack)
    {
      if (REG_P (stack_limit_rtx)
	  && REGNO (stack_limit_rtx) > 1
	  && REGNO (stack_limit_rtx) <= 31)
	{
	  /* Limit is in a GPR: compute limit + size and compare.  */
	  emit_insn (Pmode == SImode
		     ? gen_addsi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size))
		     : gen_adddi3 (tmp_reg,
				   stack_limit_rtx,
				   GEN_INT (size)));

	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
	       && TARGET_32BIT
	       && DEFAULT_ABI == ABI_V4)
	{
	  /* Limit is a symbol: materialize symbol + size with a
	     high/low pair, then compare.  */
	  rtx toload = gen_rtx_CONST (VOIDmode,
				      gen_rtx_PLUS (Pmode,
						    stack_limit_rtx,
						    GEN_INT (size)));

	  emit_insn (gen_elf_high (tmp_reg, toload));
	  emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
	  emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
				    const0_rtx));
	}
      else
	warning ("stack limit expression is not supported");
    }

  /* Keep the old stack pointer in r12 when requested, or when we
     must store the back chain by hand (no store-with-update).  */
  if (copy_r12 || ! TARGET_UPDATE)
    emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);

  if (TARGET_UPDATE)
    {
      if (size > 32767)
	{
	  /* The decrement does not fit in a 16-bit displacement;
	     load it into r0 first.  */
	  /* Need a note here so that try_split doesn't get confused.  */
	  if (get_last_insn() == NULL_RTX)
	    emit_note (0, NOTE_INSN_DELETED);
	  insn = emit_move_insn (tmp_reg, todec);
	  try_split (PATTERN (insn), insn, 0);
	  todec = tmp_reg;
	}

      /* Store-with-update writes the back chain and decrements the
	 stack pointer in one insn.  */
      if (Pmode == SImode)
	insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
					    todec, stack_reg));
      else
	insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
					    todec, stack_reg));
    }
  else
    {
      /* No store-with-update: decrement, then store the old stack
	 pointer (saved in r12 above) as the back chain.  */
      if (Pmode == SImode)
	insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
      else
	insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
      emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
		      gen_rtx_REG (Pmode, 12));
    }

  /* Describe the net effect (sp = sp - size) for the unwinder.  */
  RTX_FRAME_RELATED_P (insn) = 1;
  REG_NOTES (insn) =
    gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
		       gen_rtx_SET (VOIDmode, stack_reg,
				    gen_rtx_PLUS (Pmode, stack_reg,
						  GEN_INT (-size))),
		       REG_NOTES (insn));
}
8486
8487 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
8488 knows that:
8489
8490 (mem (plus (blah) (regXX)))
8491
8492 is really:
8493
8494 (mem (plus (blah) (const VALUE_OF_REGXX))). */
8495
8496 static void
8497 altivec_frame_fixup (insn, reg, val)
8498 rtx insn, reg;
8499 HOST_WIDE_INT val;
8500 {
8501 rtx real;
8502
8503 real = copy_rtx (PATTERN (insn));
8504
8505 real = replace_rtx (real, reg, GEN_INT (val));
8506
8507 RTX_FRAME_RELATED_P (insn) = 1;
8508 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8509 real,
8510 REG_NOTES (insn));
8511 }
8512
8513 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
8514 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
8515 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
8516 deduce these equivalences by itself so it wasn't necessary to hold
8517 its hand so much. */
8518
8519 static void
8520 rs6000_frame_related (insn, reg, val, reg2, rreg)
8521 rtx insn;
8522 rtx reg;
8523 HOST_WIDE_INT val;
8524 rtx reg2;
8525 rtx rreg;
8526 {
8527 rtx real, temp;
8528
8529 real = copy_rtx (PATTERN (insn));
8530
8531 real = replace_rtx (real, reg,
8532 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
8533 STACK_POINTER_REGNUM),
8534 GEN_INT (val)));
8535
8536 /* We expect that 'real' is either a SET or a PARALLEL containing
8537 SETs (and possibly other stuff). In a PARALLEL, all the SETs
8538 are important so they all have to be marked RTX_FRAME_RELATED_P. */
8539
8540 if (GET_CODE (real) == SET)
8541 {
8542 rtx set = real;
8543
8544 temp = simplify_rtx (SET_SRC (set));
8545 if (temp)
8546 SET_SRC (set) = temp;
8547 temp = simplify_rtx (SET_DEST (set));
8548 if (temp)
8549 SET_DEST (set) = temp;
8550 if (GET_CODE (SET_DEST (set)) == MEM)
8551 {
8552 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8553 if (temp)
8554 XEXP (SET_DEST (set), 0) = temp;
8555 }
8556 }
8557 else if (GET_CODE (real) == PARALLEL)
8558 {
8559 int i;
8560 for (i = 0; i < XVECLEN (real, 0); i++)
8561 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
8562 {
8563 rtx set = XVECEXP (real, 0, i);
8564
8565 temp = simplify_rtx (SET_SRC (set));
8566 if (temp)
8567 SET_SRC (set) = temp;
8568 temp = simplify_rtx (SET_DEST (set));
8569 if (temp)
8570 SET_DEST (set) = temp;
8571 if (GET_CODE (SET_DEST (set)) == MEM)
8572 {
8573 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8574 if (temp)
8575 XEXP (SET_DEST (set), 0) = temp;
8576 }
8577 RTX_FRAME_RELATED_P (set) = 1;
8578 }
8579 }
8580 else
8581 abort ();
8582
8583 if (reg2 != NULL_RTX)
8584 real = replace_rtx (real, reg2, rreg);
8585
8586 RTX_FRAME_RELATED_P (insn) = 1;
8587 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8588 real,
8589 REG_NOTES (insn));
8590 }
8591
8592 /* Returns an insn that has a vrsave set operation with the
8593 appropriate CLOBBERs. */
8594
8595 static rtx
8596 generate_set_vrsave (reg, info, epiloguep)
8597 rtx reg;
8598 rs6000_stack_t *info;
8599 int epiloguep;
8600 {
8601 int nclobs, i;
8602 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
8603 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8604
8605 clobs[0]
8606 = gen_rtx_SET (VOIDmode,
8607 vrsave,
8608 gen_rtx_UNSPEC_VOLATILE (SImode,
8609 gen_rtvec (2, reg, vrsave),
8610 30));
8611
8612 nclobs = 1;
8613
8614 /* We need to clobber the registers in the mask so the scheduler
8615 does not move sets to VRSAVE before sets of AltiVec registers.
8616
8617 However, if the function receives nonlocal gotos, reload will set
8618 all call saved registers live. We will end up with:
8619
8620 (set (reg 999) (mem))
8621 (parallel [ (set (reg vrsave) (unspec blah))
8622 (clobber (reg 999))])
8623
8624 The clobber will cause the store into reg 999 to be dead, and
8625 flow will attempt to delete an epilogue insn. In this case, we
8626 need an unspec use/set of the register. */
8627
8628 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8629 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
8630 {
8631 if (!epiloguep || call_used_regs [i])
8632 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
8633 gen_rtx_REG (V4SImode, i));
8634 else
8635 {
8636 rtx reg = gen_rtx_REG (V4SImode, i);
8637
8638 clobs[nclobs++]
8639 = gen_rtx_SET (VOIDmode,
8640 reg,
8641 gen_rtx_UNSPEC (V4SImode,
8642 gen_rtvec (1, reg), 27));
8643 }
8644 }
8645
8646 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
8647
8648 for (i = 0; i < nclobs; ++i)
8649 XVECEXP (insn, 0, i) = clobs[i];
8650
8651 return insn;
8652 }
8653
/* Emit function prologue as insns.  Allocates the stack frame and
   saves AltiVec/VRSAVE, FPRs, GPRs, EH data registers, LR and CR as
   required by the frame layout computed by rs6000_stack_info, then
   sets up the frame pointer and PIC/TOC register if needed.  */

void
rs6000_emit_prologue ()
{
  rs6000_stack_t *info = rs6000_stack_info ();
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
  rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
  rtx frame_reg_rtx = sp_reg_rtx;
  rtx cr_save_rtx = NULL;
  rtx insn;
  int saving_FPRs_inline;
  int using_store_multiple;
  /* Offset added to save-area offsets when registers are addressed
     relative to the already-decremented stack pointer (V.4 small
     frames); zero when addressing via the old sp copy in r12.  */
  HOST_WIDE_INT sp_offset = 0;

  using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			  && info->first_gp_reg_save < 31);
  saving_FPRs_inline = (info->first_fp_reg_save == 64
			|| FP_SAVE_INLINE (info->first_fp_reg_save));

  /* For V.4, update stack before we do any saving and set back pointer.  */
  if (info->push_p && DEFAULT_ABI == ABI_V4)
    {
      if (info->total_size < 32767)
	sp_offset = info->total_size;
      else
	frame_reg_rtx = frame_ptr_rtx;
      rs6000_emit_allocate_stack (info->total_size,
				  (frame_reg_rtx != sp_reg_rtx
				   && (info->cr_save_p
				       || info->lr_save_p
				       || info->first_fp_reg_save < 64
				       || info->first_gp_reg_save < 32
				       )));
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();
    }

  /* Save AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      /* There should be a non inline version of this, for when we
	 are saving lots of vector registers.  */
      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx areg, savereg, mem;
	    int offset;

	    offset = info->altivec_save_offset + sp_offset
	      + 16 * (i - info->first_altivec_reg_save);

	    savereg = gen_rtx_REG (V4SImode, i);

	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn (areg, GEN_INT (offset));

	    /* AltiVec addressing mode is [reg+reg].  */
	    mem = gen_rtx_MEM (V4SImode,
			       gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));

	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, savereg);

	    altivec_frame_fixup (insn, areg, offset);
	  }
    }

  /* VRSAVE is a bit vector representing which AltiVec registers
     are used.  The OS uses this to determine which vector
     registers to save on a context switch.  We need to save
     VRSAVE on the stack frame, add whatever AltiVec registers we
     used in this function, and do the corresponding magic in the
     epilogue.  */

  if (TARGET_ALTIVEC && info->vrsave_mask != 0)
    {
      rtx reg, mem, vrsave;
      int offset;

      /* Get VRSAVE onto a GPR.  */
      reg = gen_rtx_REG (SImode, 12);
      vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
      if (TARGET_MACHO)
	emit_insn (gen_get_vrsave_internal (reg));
      else
	emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));

      /* Save VRSAVE.  */
      offset = info->vrsave_save_offset + sp_offset;
      mem
	= gen_rtx_MEM (SImode,
		       gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      insn = emit_move_insn (mem, reg);

      /* Include the registers in the mask.  */
      emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));

      insn = emit_insn (generate_set_vrsave (reg, info, 0));
    }

  /* If we use the link register, get it into r0.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, 0),
		    gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

  /* If we need to save CR, put it into r12.  */
  if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
    {
      cr_save_rtx = gen_rtx_REG (SImode, 12);
      emit_insn (gen_movesi_from_cr (cr_save_rtx));
    }

  /* Do any required saving of fpr's.  If only one or two to save, do
     it ourselves.  Otherwise, call function.  */
  if (saving_FPRs_inline)
    {
      int i;
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	if ((regs_ever_live[info->first_fp_reg_save+i]
	     && ! call_used_regs[info->first_fp_reg_save+i]))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				 GEN_INT (info->fp_save_offset
					  + sp_offset
					  + 8 * i));
	    mem = gen_rtx_MEM (DFmode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, reg);
	    rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				  NULL_RTX, NULL_RTX);
	  }
    }
  else if (info->first_fp_reg_save != 64)
    {
      /* Out-of-line save: a PARALLEL that calls the _savefN helper
	 (clobbering LR) and records each FPR store for unwinding.  */
      int i;
      char rname[30];
      const char *alloc_rname;
      rtvec p;
      p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);

      RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
					  gen_rtx_REG (Pmode,
						       LINK_REGISTER_REGNUM));
      sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
	       info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
      alloc_rname = ggc_strdup (rname);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_SYMBOL_REF (Pmode,
							  alloc_rname));
      for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset + 8*i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }

  /* Save GPRs.  This is done as a PARALLEL if we are using
     the store-multiple instructions.  */
  if (using_store_multiple)
    {
      rtvec p, dwarfp;
      int i;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr, reg, mem;
	  reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->gp_save_offset
					+ sp_offset
					+ reg_size * i));
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
	}
      insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    NULL_RTX, NULL_RTX);
    }
  else
    {
      /* Individual stores: live call-saved GPRs, plus the PIC
	 register when the ABI requires it to be preserved.  */
      int i;
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	if ((regs_ever_live[info->first_gp_reg_save+i]
	     && ! call_used_regs[info->first_gp_reg_save+i])
	    || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
		&& ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		    || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	  {
	    rtx addr, reg, mem;
	    reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				 GEN_INT (info->gp_save_offset
					  + sp_offset
					  + reg_size * i));
	    mem = gen_rtx_MEM (reg_mode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    insn = emit_move_insn (mem, reg);
	    rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				  NULL_RTX, NULL_RTX);
	  }
    }

  /* ??? There's no need to emit actual instructions here, but it's the
     easiest way to get the frame unwind information emitted.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  rtx addr, reg, mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  reg = gen_rtx_REG (reg_mode, regno);
	  addr = plus_constant (frame_reg_rtx,
				info->ehrd_offset + sp_offset
				+ reg_size * (int) i);
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  insn = emit_move_insn (mem, reg);
	  rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
				NULL_RTX, NULL_RTX);
	}
    }

  /* Save lr if we used it.  (It was copied into r0 above.)  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      rtx reg = gen_rtx_REG (Pmode, 0);
      rtx mem = gen_rtx_MEM (Pmode, addr);
      /* This should not be of rs6000_sr_alias_set, because of
	 __builtin_return_address.  */

      insn = emit_move_insn (mem, reg);
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
    }

  /* Save CR if we use any that must be preserved.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* If r12 was used to hold the original sp, copy cr into r0 now
	 that it's free.  */
      if (REGNO (frame_reg_rtx) == 12)
	{
	  cr_save_rtx = gen_rtx_REG (SImode, 0);
	  emit_insn (gen_movesi_from_cr (cr_save_rtx));
	}
      insn = emit_move_insn (mem, cr_save_rtx);

      /* Now, there's no way that dwarf2out_frame_debug_expr is going
	 to understand '(unspec:SI [(reg:CC 68) ...] 19)'.  But that's
	 OK.  All we have to do is specify that _one_ condition code
	 register is saved in this stack slot.  The thrower's epilogue
	 will then restore all the call-saved registers.
	 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux.  */
      rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
			    cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
    }

  /* Update stack and set back pointer unless this is V.4,
     for which it was done previously.  */
  if (info->push_p && DEFAULT_ABI != ABI_V4)
    rs6000_emit_allocate_stack (info->total_size, FALSE);

  /* Set frame pointer, if needed.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
			     sp_reg_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If we are using PIC_OFFSET_TABLE_REGNUM, we need to set it up.  */
  if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
      || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
	  && regs_ever_live[PIC_OFFSET_TABLE_REGNUM]))
    {
      /* If emit_load_toc_table will use the link register, we need to save
	 it.  We use R11 for this purpose because emit_load_toc_table
	 can use register 0.  This allows us to use a plain 'blr' to return
	 from the procedure more often.  */
      int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
				      && ! info->lr_save_p
				      && EXIT_BLOCK_PTR->pred != NULL);
      if (save_LR_around_toc_setup)
	emit_move_insn (gen_rtx_REG (Pmode, 11),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));

      rs6000_emit_load_toc_table (TRUE);

      if (save_LR_around_toc_setup)
	emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
			gen_rtx_REG (Pmode, 11));
    }

  /* Darwin PIC: load the picbase into LR and copy it to the PIC
     register.  */
  if (DEFAULT_ABI == ABI_DARWIN
      && flag_pic && current_function_uses_pic_offset_table)
    {
      rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);

      rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));

      rs6000_maybe_dead (
	emit_move_insn (gen_rtx_REG (Pmode, PIC_OFFSET_TABLE_REGNUM),
			gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
    }
}
8998
8999 /* Write function prologue. */
9000
9001 static void
9002 rs6000_output_function_prologue (file, size)
9003 FILE *file;
9004 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
9005 {
9006 rs6000_stack_t *info = rs6000_stack_info ();
9007
9008 if (TARGET_DEBUG_STACK)
9009 debug_stack_info (info);
9010
9011 /* Write .extern for any function we will call to save and restore
9012 fp values. */
9013 if (info->first_fp_reg_save < 64
9014 && !FP_SAVE_INLINE (info->first_fp_reg_save))
9015 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
9016 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
9017 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
9018 RESTORE_FP_SUFFIX);
9019
9020 /* Write .extern for AIX common mode routines, if needed. */
9021 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
9022 {
9023 fputs ("\t.extern __mulh\n", file);
9024 fputs ("\t.extern __mull\n", file);
9025 fputs ("\t.extern __divss\n", file);
9026 fputs ("\t.extern __divus\n", file);
9027 fputs ("\t.extern __quoss\n", file);
9028 fputs ("\t.extern __quous\n", file);
9029 common_mode_defined = 1;
9030 }
9031
9032 if (! HAVE_prologue)
9033 {
9034 start_sequence ();
9035
9036 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
9037 the "toplevel" insn chain. */
9038 emit_note (0, NOTE_INSN_DELETED);
9039 rs6000_emit_prologue ();
9040 emit_note (0, NOTE_INSN_DELETED);
9041
9042 if (TARGET_DEBUG_STACK)
9043 debug_rtx_list (get_insns (), 100);
9044 final (get_insns (), file, FALSE, FALSE);
9045 end_sequence ();
9046 }
9047
9048 rs6000_pic_labelno++;
9049 }
9050
9051 /* Emit function epilogue as insns.
9052
9053 At present, dwarf2out_frame_debug_expr doesn't understand
9054 register restores, so we don't bother setting RTX_FRAME_RELATED_P
9055 anywhere in the epilogue. Most of the insns below would in any case
9056 need special notes to explain where r11 is in relation to the stack. */
9057
void
rs6000_emit_epilogue (sibcall)
     int sibcall;
{
  rs6000_stack_t *info;
  int restoring_FPRs_inline;
  int using_load_multiple;
  int using_mfcr_multiple;
  int use_backchain_to_restore_sp;
  int sp_offset = 0;
  rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
  rtx frame_reg_rtx = sp_reg_rtx;
  enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
  int reg_size = TARGET_POWERPC64 ? 8 : 4;
  int i;

  info = rs6000_stack_info ();
  using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
			 && info->first_gp_reg_save < 31);
  /* FPRs are restored inline (rather than via an out-of-line restore
     routine) for sibcalls and eh_return, or when there are few/no FPRs
     to restore.  */
  restoring_FPRs_inline = (sibcall
			   || current_function_calls_eh_return
			   || info->first_fp_reg_save == 64
			   || FP_SAVE_INLINE (info->first_fp_reg_save));
  use_backchain_to_restore_sp = (frame_pointer_needed
				 || current_function_calls_alloca
				 || info->total_size > 32767);
  /* mfcr with a multi-bit field mask is cheap on these processors.  */
  using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
			 || rs6000_cpu == PROCESSOR_PPC603
			 || rs6000_cpu == PROCESSOR_PPC750
			 || optimize_size);

  /* If we have a frame pointer, a call to alloca,  or a large stack
     frame, restore the old stack pointer using the backchain.  Otherwise,
     we know what size to update it with.  */
  if (use_backchain_to_restore_sp)
    {
      /* Under V.4, don't reset the stack pointer until after we're done
	 loading the saved registers.  */
      if (DEFAULT_ABI == ABI_V4)
	frame_reg_rtx = gen_rtx_REG (Pmode, 11);

      /* The word at 0(r1) is the backchain: the caller's stack pointer.  */
      emit_move_insn (frame_reg_rtx,
		      gen_rtx_MEM (Pmode, sp_reg_rtx));

    }
  else if (info->push_p)
    {
      if (DEFAULT_ABI == ABI_V4)
	sp_offset = info->total_size;
      else
	{
	  emit_insn (TARGET_32BIT
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (info->total_size)));
	}
    }

  /* Restore AltiVec registers if needed.  */
  if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
    {
      int i;

      for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
	if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
	  {
	    rtx addr, areg, mem;

	    /* r0 holds the offset; AltiVec loads need a [reg+reg] address.  */
	    areg = gen_rtx_REG (Pmode, 0);
	    emit_move_insn
	      (areg, GEN_INT (info->altivec_save_offset
			      + sp_offset
			      + 16 * (i - info->first_altivec_reg_save)));

	    /* AltiVec addressing mode is [reg+reg].  */
	    addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
	    mem = gen_rtx_MEM (V4SImode, addr);
	    set_mem_alias_set (mem, rs6000_sr_alias_set);

	    emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
	  }
    }

  /* Restore VRSAVE if needed.  */
  if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
    {
      rtx addr, mem, reg;

      addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			   GEN_INT (info->vrsave_save_offset + sp_offset));
      mem = gen_rtx_MEM (SImode, addr);
      set_mem_alias_set (mem, rs6000_sr_alias_set);
      /* Stage the saved value in r12, then move it into VRSAVE.  */
      reg = gen_rtx_REG (SImode, 12);
      emit_move_insn (reg, mem);

      emit_insn (generate_set_vrsave (reg, info, 1));
    }

  /* Get the old lr if we saved it.  */
  if (info->lr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->lr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (Pmode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* Stage in r0; moved into LR further below.  */
      emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
    }

  /* Get the old cr if we saved it.  */
  if (info->cr_save_p)
    {
      rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->cr_save_offset + sp_offset));
      rtx mem = gen_rtx_MEM (SImode, addr);

      set_mem_alias_set (mem, rs6000_sr_alias_set);

      /* Stage in r12; moved into CR fields further below.  */
      emit_move_insn (gen_rtx_REG (SImode, 12), mem);
    }

  /* Set LR here to try to overlap restores below.  */
  if (info->lr_save_p)
    emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
		    gen_rtx_REG (Pmode, 0));

  /* Load exception handler data registers, if needed.  */
  if (current_function_calls_eh_return)
    {
      unsigned int i, regno;

      for (i = 0; ; ++i)
	{
	  rtx addr, mem;

	  regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;

	  addr = plus_constant (frame_reg_rtx,
				info->ehrd_offset + sp_offset
				+ reg_size * (int) i);
	  mem = gen_rtx_MEM (reg_mode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
	}
    }

  /* Restore GPRs.  This is done as a PARALLEL if we are using
     the load-multiple instructions.  */
  if (using_load_multiple)
    {
      rtvec p;
      p = rtvec_alloc (32 - info->first_gp_reg_save);
      for (i = 0; i < 32 - info->first_gp_reg_save; i++)
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  RTVEC_ELT (p, i) =
	    gen_rtx_SET (VOIDmode,
			 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
			 mem);
	}
      emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
  else
    /* Otherwise restore only the GPRs that were actually saved:
       call-saved regs that are live, plus the PIC register when the
       ABI treats it as call-saved.  */
    for (i = 0; i < 32 - info->first_gp_reg_save; i++)
      if ((regs_ever_live[info->first_gp_reg_save+i]
	   && ! call_used_regs[info->first_gp_reg_save+i])
	  || (i+info->first_gp_reg_save == PIC_OFFSET_TABLE_REGNUM
	      && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
		  || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
	{
	  rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
				   GEN_INT (info->gp_save_offset
					    + sp_offset
					    + reg_size * i));
	  rtx mem = gen_rtx_MEM (reg_mode, addr);

	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (reg_mode,
				       info->first_gp_reg_save + i),
			  mem);
	}

  /* Restore fpr's if we need to do it without calling a function.  */
  if (restoring_FPRs_inline)
    for (i = 0; i < 64 - info->first_fp_reg_save; i++)
      if ((regs_ever_live[info->first_fp_reg_save+i]
	   && ! call_used_regs[info->first_fp_reg_save+i]))
	{
	  rtx addr, mem;
	  addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
			       GEN_INT (info->fp_save_offset
					+ sp_offset
					+ 8 * i));
	  mem = gen_rtx_MEM (DFmode, addr);
	  set_mem_alias_set (mem, rs6000_sr_alias_set);

	  emit_move_insn (gen_rtx_REG (DFmode,
				       info->first_fp_reg_save + i),
			  mem);
	}

  /* If we saved cr, restore it here.  Just those that were used.  */
  if (info->cr_save_p)
    {
      /* r12 was loaded with the saved CR image above.  */
      rtx r12_rtx = gen_rtx_REG (SImode, 12);
      int count = 0;

      if (using_mfcr_multiple)
	{
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      count++;
	  if (count == 0)
	    abort ();
	}

      if (using_mfcr_multiple && count > 1)
	{
	  /* Restore all used CR fields with one mtcrf, expressed as a
	     PARALLEL of per-field SETs of (unspec 20).  */
	  rtvec p;
	  int ndx;

	  p = rtvec_alloc (count);

	  ndx = 0;
	  for (i = 0; i < 8; i++)
	    if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	      {
		rtvec r = rtvec_alloc (2);
		RTVEC_ELT (r, 0) = r12_rtx;
		RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
		RTVEC_ELT (p, ndx) =
		  gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
			       gen_rtx_UNSPEC (CCmode, r, 20));
		ndx++;
	      }
	  emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
	  if (ndx != count)
	    abort ();
	}
      else
	for (i = 0; i < 8; i++)
	  if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
	    {
	      emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
							   CR0_REGNO+i),
					      r12_rtx));
	    }
    }

  /* If this is V.4, unwind the stack pointer after all of the loads
     have been done.  We need to emit a block here so that sched
     doesn't decide to move the sp change before the register restores
     (which may not have any obvious dependency on the stack).  This
     doesn't hurt performance, because there is no scheduling that can
     be done after this point.  */
  if (DEFAULT_ABI == ABI_V4)
    {
      if (frame_reg_rtx != sp_reg_rtx)
	rs6000_emit_stack_tie ();

      if (use_backchain_to_restore_sp)
	{
	  emit_move_insn (sp_reg_rtx, frame_reg_rtx);
	}
      else if (sp_offset != 0)
	{
	  emit_insn (Pmode == SImode
		     ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset))
		     : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
				   GEN_INT (sp_offset)));
	}
    }

  if (current_function_calls_eh_return)
    {
      /* Apply the stack adjustment requested by the EH runtime.  */
      rtx sa = EH_RETURN_STACKADJ_RTX;
      emit_insn (Pmode == SImode
		 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
		 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
    }

  if (!sibcall)
    {
      rtvec p;
      if (! restoring_FPRs_inline)
	p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
      else
	p = rtvec_alloc (2);

      RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
      RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
				      gen_rtx_REG (Pmode,
						   LINK_REGISTER_REGNUM));

      /* If we have to restore more than two FP registers, branch to the
	 restore function.  It will return to our caller.  */
      if (! restoring_FPRs_inline)
	{
	  int i;
	  char rname[30];
	  const char *alloc_rname;

	  sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
		   info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
	  alloc_rname = ggc_strdup (rname);
	  RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
					  gen_rtx_SYMBOL_REF (Pmode,
							      alloc_rname));

	  for (i = 0; i < 64 - info->first_fp_reg_save; i++)
	    {
	      rtx addr, mem;
	      addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
				   GEN_INT (info->fp_save_offset + 8*i));
	      mem = gen_rtx_MEM (DFmode, addr);
	      set_mem_alias_set (mem, rs6000_sr_alias_set);

	      RTVEC_ELT (p, i+3) =
		gen_rtx_SET (VOIDmode,
			     gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
			     mem);
	    }
	}

      emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
    }
}
9399
9400 /* Write function epilogue. */
9401
static void
rs6000_output_function_epilogue (file, size)
     FILE *file;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  rs6000_stack_t *info = rs6000_stack_info ();
  /* Whether to emit the optional (variable-length) traceback fields.  */
  int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;

  if (! HAVE_epilogue)
    {
      rtx insn = get_last_insn ();
      /* If the last insn was a BARRIER, we don't have to write anything except
	 the trace table.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn == 0 ||  GET_CODE (insn) != BARRIER)
	{
	  /* This is slightly ugly, but at least we don't have two
	     copies of the epilogue-emitting code.  */
	  start_sequence ();

	  /* A NOTE_INSN_DELETED is supposed to be at the start
	     and end of the "toplevel" insn chain.  */
	  emit_note (0, NOTE_INSN_DELETED);
	  rs6000_emit_epilogue (FALSE);
	  emit_note (0, NOTE_INSN_DELETED);

	  if (TARGET_DEBUG_STACK)
	    debug_rtx_list (get_insns (), 100);
	  final (get_insns (), file, FALSE, FALSE);
	  end_sequence ();
	}
    }

  /* Output a traceback table here.  See /usr/include/sys/debug.h for info
     on its format.

     We don't output a traceback table if -finhibit-size-directive was
     used.  The documentation for -finhibit-size-directive reads
     ``don't output a @code{.size} assembler directive, or anything
     else that would cause trouble if the function is split in the
     middle, and the two halves are placed at locations far apart in
     memory.''  The traceback table has this property, since it
     includes the offset from the start of the function to the
     traceback table itself.

     System V.4 Powerpc's (and the embedded ABI derived from it) use a
     different traceback table.  */
  if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
    {
      const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
      const char *language_string = lang_hooks.name;
      int fixed_parms = 0, float_parms = 0, parm_info = 0;
      int i;

      while (*fname == '.')	/* V.4 encodes . in the name */
	fname++;

      /* Need label immediately before tbtab, so we can compute its offset
	 from the function start.  */
      if (*fname == '*')
	++fname;
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
      ASM_OUTPUT_LABEL (file, fname);

      /* The .tbtab pseudo-op can only be used for the first eight
	 expressions, since it can't handle the possibly variable
	 length fields that follow.  However, if you omit the optional
	 fields, the assembler outputs zeros for all optional fields
	 anyways, giving each variable length field is minimum length
	 (as defined in sys/debug.h).  Thus we can not use the .tbtab
	 pseudo-op at all.  */

      /* An all-zero word flags the start of the tbtab, for debuggers
	 that have to find it by searching forward from the entry
	 point or from the current pc.  */
      fputs ("\t.long 0\n", file);

      /* Tbtab format type.  Use format type 0.  */
      fputs ("\t.byte 0,", file);

      /* Language type.  Unfortunately, there doesn't seem to be any
	 official way to get this info, so we use language_string.  C
	 is 0.  C++ is 9.  No number defined for Obj-C, so use the
	 value for C for now.  There is no official value for Java,
	 although IBM appears to be using 13.  There is no official value
	 for Chill, so we've chosen 44 pseudo-randomly.  */
      if (! strcmp (language_string, "GNU C")
	  || ! strcmp (language_string, "GNU Objective-C"))
	i = 0;
      else if (! strcmp (language_string, "GNU F77"))
	i = 1;
      else if (! strcmp (language_string, "GNU Ada"))
	i = 3;
      else if (! strcmp (language_string, "GNU Pascal"))
	i = 2;
      else if (! strcmp (language_string, "GNU C++"))
	i = 9;
      else if (! strcmp (language_string, "GNU Java"))
	i = 13;
      else if (! strcmp (language_string, "GNU CHILL"))
	i = 44;
      else
	abort ();
      fprintf (file, "%d,", i);

      /* 8 single bit fields: global linkage (not set for C extern linkage,
	 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
	 from start of procedure stored in tbtab, internal function, function
	 has controlled storage, function has no toc, function uses fp,
	 function logs/aborts fp operations.  */
      /* Assume that fp operations are used if any fp reg must be saved.  */
      fprintf (file, "%d,",
	       (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));

      /* 6 bitfields: function is interrupt handler, name present in
	 proc table, function calls alloca, on condition directives
	 (controls stack walks, 3 bits), saves condition reg, saves
	 link reg.  */
      /* The `function calls alloca' bit seems to be set whenever reg 31 is
	 set up as a frame pointer, even when there is no alloca call.  */
      fprintf (file, "%d,",
	       ((optional_tbtab << 6)
		| ((optional_tbtab & frame_pointer_needed) << 5)
		| (info->cr_save_p << 1)
		| (info->lr_save_p)));

      /* 3 bitfields: saves backchain, fixup code, number of fpr saved
	 (6 bits).  */
      fprintf (file, "%d,",
	       (info->push_p << 7) | (64 - info->first_fp_reg_save));

      /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits).  */
      fprintf (file, "%d,", (32 - first_reg_to_save ()));

      if (optional_tbtab)
	{
	  /* Compute the parameter info from the function decl argument
	     list.  */
	  tree decl;
	  int next_parm_info_bit = 31;

	  for (decl = DECL_ARGUMENTS (current_function_decl);
	       decl; decl = TREE_CHAIN (decl))
	    {
	      rtx parameter = DECL_INCOMING_RTL (decl);
	      enum machine_mode mode = GET_MODE (parameter);

	      if (GET_CODE (parameter) == REG)
		{
		  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
		    {
		      int bits;

		      float_parms++;

		      /* Two-bit encoding: 10 = single, 11 = double.  */
		      if (mode == SFmode)
			bits = 0x2;
		      else if (mode == DFmode)
			bits = 0x3;
		      else
			abort ();

		      /* If only one bit will fit, don't or in this entry.  */
		      if (next_parm_info_bit > 0)
			parm_info |= (bits << (next_parm_info_bit - 1));
		      next_parm_info_bit -= 2;
		    }
		  else
		    {
		      fixed_parms += ((GET_MODE_SIZE (mode)
				       + (UNITS_PER_WORD - 1))
				      / UNITS_PER_WORD);
		      next_parm_info_bit -= 1;
		    }
		}
	    }
	}

      /* Number of fixed point parameters.  */
      /* This is actually the number of words of fixed point parameters; thus
	 an 8 byte struct counts as 2; and thus the maximum value is 8.  */
      fprintf (file, "%d,", fixed_parms);

      /* 2 bitfields: number of floating point parameters (7 bits), parameters
	 all on stack.  */
      /* This is actually the number of fp registers that hold parameters;
	 and thus the maximum value is 13.  */
      /* Set parameters on stack bit if parameters are not in their original
	 registers, regardless of whether they are on the stack?  Xlc
	 seems to set the bit when not optimizing.  */
      fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));

      if (! optional_tbtab)
	return;

      /* Optional fields follow.  Some are variable length.  */

      /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
	 11 double float.  */
      /* There is an entry for each parameter in a register, in the order that
	 they occur in the parameter list.  Any intervening arguments on the
	 stack are ignored.  If the list overflows a long (max possible length
	 34 bits) then completely leave off all elements that don't fit.  */
      /* Only emit this long if there was at least one parameter.  */
      if (fixed_parms || float_parms)
	fprintf (file, "\t.long %d\n", parm_info);

      /* Offset from start of code to tb table.  */
      fputs ("\t.long ", file);
      ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      fputs ("-.", file);
#if TARGET_AIX
      RS6000_OUTPUT_BASENAME (file, fname);
#else
      assemble_name (file, fname);
#endif
      putc ('\n', file);

      /* Interrupt handler mask.  */
      /* Omit this long, since we never set the interrupt handler bit
	 above.  */

      /* Number of CTL (controlled storage) anchors.  */
      /* Omit this long, since the has_ctl bit is never set above.  */

      /* Displacement into stack of each CTL anchor.  */
      /* Omit this list of longs, because there are no CTL anchors.  */

      /* Length of function name.  */
      fprintf (file, "\t.short %d\n", (int) strlen (fname));

      /* Function name.  */
      assemble_string (fname, strlen (fname));

      /* Register for alloca automatic storage; this is always reg 31.
	 Only emit this if the alloca bit was set above.  */
      if (frame_pointer_needed)
	fputs ("\t.byte 31\n", file);

      fputs ("\t.align 2\n", file);
    }
  return;
}
9651 \f
9652 /* A C compound statement that outputs the assembler code for a thunk
9653 function, used to implement C++ virtual function calls with
9654 multiple inheritance. The thunk acts as a wrapper around a virtual
9655 function, adjusting the implicit object parameter before handing
9656 control off to the real function.
9657
9658 First, emit code to add the integer DELTA to the location that
9659 contains the incoming first argument. Assume that this argument
9660 contains a pointer, and is the one used to pass the `this' pointer
9661 in C++. This is the incoming argument *before* the function
9662 prologue, e.g. `%o0' on a sparc. The addition must preserve the
9663 values of all other incoming arguments.
9664
9665 After the addition, emit code to jump to FUNCTION, which is a
9666 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
9667 not touch the return address. Hence returning from FUNCTION will
9668 return to whoever called the current `thunk'.
9669
9670 The effect must be as if FUNCTION had been called directly with the
9671 adjusted first argument. This macro is responsible for emitting
9672 all of the code for a thunk function; output_function_prologue()
9673 and output_function_epilogue() are not invoked.
9674
9675 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
9676 been extracted from it.) It might possibly be useful on some
9677 targets, but probably not.
9678
9679 If you do not define this macro, the target-independent code in the
9680 C++ frontend will generate a less efficient heavyweight thunk that
9681 calls FUNCTION instead of jumping to it. The generic approach does
9682 not support varargs. */
9683
void
output_mi_thunk (file, thunk_fndecl, delta, function)
     FILE *file;
     tree thunk_fndecl ATTRIBUTE_UNUSED;
     int delta;
     tree function;
{
  /* The `this' pointer is in r3, or r4 when the return value is
     passed by invisible reference (aggregate return).  */
  const char *this_reg =
    reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
  const char *prefix;
  const char *fname;
  const char *r0	 = reg_names[0];
  const char *toc	 = reg_names[2];
  const char *schain	 = reg_names[11];
  const char *r12	 = reg_names[12];
  char buf[512];
  static int labelno = 0;

  /* Small constants that can be done by one add instruction.  */
  if (delta >= -32768 && delta <= 32767)
    {
      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
    }

  /* Large constants that can be done by one addis instruction.  */
  else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
    asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		 delta >> 16);

  /* 32-bit constants that can be done by an add and addis instruction.  */
  else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
    {
      /* Break into two pieces, propagating the sign bit from the low
	 word to the upper word.  */
      int delta_high = delta >> 16;
      int delta_low  = delta & 0xffff;
      if ((delta_low & 0x8000) != 0)
	{
	  delta_high++;
	  delta_low = (delta_low ^ 0x8000) - 0x8000;	/* sign extend */
	}

      asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
		   delta_high);

      if (! TARGET_NEW_MNEMONICS)
	fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
      else
	fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
    }

  /* 64-bit constants, fixme */
  else
    abort ();

  /* Get the prefix in front of the names.  */
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_AIX:
      /* AIX function entry points are prefixed with '.'.  */
      prefix = ".";
      break;

    case ABI_V4:
    case ABI_AIX_NODESC:
      prefix = "";
      break;
    }

  /* If the function is compiled in this module, jump to it directly.
     Otherwise, load up its address and jump to it.  */

  fname = XSTR (XEXP (DECL_RTL (function), 0), 0);

  if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
      && ! lookup_attribute ("longcall",
			     TYPE_ATTRIBUTES (TREE_TYPE (function))))
    {
      fprintf (file, "\tb %s", prefix);
      assemble_name (file, fname);
      if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
      putc ('\n', file);
    }

  else
    {
      switch (DEFAULT_ABI)
	{
	default:
	  abort ();

	case ABI_AIX:
	  /* Set up a TOC entry for the function.  */
	  ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
	  toc_section ();
	  ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
	  labelno++;

	  if (TARGET_MINIMAL_TOC)
	    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
	  else
	    {
	      fputs ("\t.tc ", file);
	      assemble_name (file, fname);
	      fputs ("[TC],", file);
	    }
	  assemble_name (file, fname);
	  putc ('\n', file);
	  text_section ();
	  /* Load the function descriptor address from the TOC, then
	     load entry point, TOC, and static chain from the
	     descriptor, and jump through CTR.  */
	  if (TARGET_MINIMAL_TOC)
	    asm_fprintf (file, (TARGET_32BIT)
			 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
			 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
	  asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
	  assemble_name (file, buf);
	  if (TARGET_ELF && TARGET_MINIMAL_TOC)
	    fputs ("-(.LCTOC1)", file);
	  asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
		       r0, r12);

	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
		       toc, r12);

	  asm_fprintf (file, "\tmtctr %s\n", r0);
	  asm_fprintf (file,
		       (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
		       schain, r12);

	  asm_fprintf (file, "\tbctr\n");
	  break;

	case ABI_AIX_NODESC:
	case ABI_V4:
	  fprintf (file, "\tb %s", prefix);
	  assemble_name (file, fname);
	  if (flag_pic) fputs ("@plt", file);
	  putc ('\n', file);
	  break;

#if TARGET_MACHO
	case ABI_DARWIN:
	  fprintf (file, "\tb %s", prefix);
	  /* Jump through the PIC stub when the target isn't defined
	     in this image.  */
	  if (flag_pic && !machopic_name_defined_p (fname))
	    assemble_name (file, machopic_stub_name (fname));
	  else
	    assemble_name (file, fname);
	  putc ('\n', file);
	  break;
#endif
	}
    }
}
9844
9845 \f
9846 /* A quick summary of the various types of 'constant-pool tables'
9847 under PowerPC:
9848
9849 Target Flags Name One table per
9850 AIX (none) AIX TOC object file
9851 AIX -mfull-toc AIX TOC object file
9852 AIX -mminimal-toc AIX minimal TOC translation unit
9853 SVR4/EABI (none) SVR4 SDATA object file
9854 SVR4/EABI -fpic SVR4 pic object file
9855 SVR4/EABI -fPIC SVR4 PIC translation unit
9856 SVR4/EABI -mrelocatable EABI TOC function
9857 SVR4/EABI -maix AIX TOC object file
9858 SVR4/EABI -maix -mminimal-toc
9859 AIX minimal TOC translation unit
9860
9861 Name Reg. Set by entries contains:
9862 made by addrs? fp? sum?
9863
9864 AIX TOC 2 crt0 as Y option option
9865 AIX minimal TOC 30 prolog gcc Y Y option
9866 SVR4 SDATA 13 crt0 gcc N Y N
9867 SVR4 pic 30 prolog ld Y not yet N
9868 SVR4 PIC 30 prolog gcc Y option option
9869 EABI TOC 30 prolog gcc Y option option
9870
9871 */
9872
9873 /* Hash table stuff for keeping track of TOC entries. */
9874
struct toc_hash_struct
{
  /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
     ASM_OUTPUT_SPECIAL_POOL_ENTRY_P.  */
  rtx key;
  /* Mode the constant was emitted in; part of the hash/equality key.  */
  enum machine_mode key_mode;
  /* Label number of the TOC entry emitted for this constant.  */
  int labelno;
};

/* Hash table of (constant, mode) pairs already given TOC entries, so
   an equivalent constant can reuse an existing entry.  */
static htab_t toc_hash_table;
9885
9886 /* Hash functions for the hash table. */
9887
9888 static unsigned
9889 rs6000_hash_constant (k)
9890 rtx k;
9891 {
9892 unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
9893 const char *format = GET_RTX_FORMAT (GET_CODE (k));
9894 int flen = strlen (format);
9895 int fidx;
9896
9897 if (GET_CODE (k) == LABEL_REF)
9898 return result * 1231 + X0INT (XEXP (k, 0), 3);
9899
9900 if (GET_CODE (k) == CONST_DOUBLE)
9901 fidx = 1;
9902 else if (GET_CODE (k) == CODE_LABEL)
9903 fidx = 3;
9904 else
9905 fidx = 0;
9906
9907 for (; fidx < flen; fidx++)
9908 switch (format[fidx])
9909 {
9910 case 's':
9911 {
9912 unsigned i, len;
9913 const char *str = XSTR (k, fidx);
9914 len = strlen (str);
9915 result = result * 613 + len;
9916 for (i = 0; i < len; i++)
9917 result = result * 613 + (unsigned) str[i];
9918 break;
9919 }
9920 case 'u':
9921 case 'e':
9922 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
9923 break;
9924 case 'i':
9925 case 'n':
9926 result = result * 613 + (unsigned) XINT (k, fidx);
9927 break;
9928 case 'w':
9929 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
9930 result = result * 613 + (unsigned) XWINT (k, fidx);
9931 else
9932 {
9933 size_t i;
9934 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
9935 result = result * 613 + (unsigned) (XWINT (k, fidx)
9936 >> CHAR_BIT * i);
9937 }
9938 break;
9939 default:
9940 abort ();
9941 }
9942 return result;
9943 }
9944
9945 static unsigned
9946 toc_hash_function (hash_entry)
9947 const void * hash_entry;
9948 {
9949 const struct toc_hash_struct *thc =
9950 (const struct toc_hash_struct *) hash_entry;
9951 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
9952 }
9953
9954 /* Compare H1 and H2 for equivalence. */
9955
9956 static int
9957 toc_hash_eq (h1, h2)
9958 const void * h1;
9959 const void * h2;
9960 {
9961 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
9962 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
9963
9964 if (((const struct toc_hash_struct *) h1)->key_mode
9965 != ((const struct toc_hash_struct *) h2)->key_mode)
9966 return 0;
9967
9968 /* Gotcha: One of these const_doubles will be in memory.
9969 The other may be on the constant-pool chain.
9970 So rtx_equal_p will think they are different... */
9971 if (r1 == r2)
9972 return 1;
9973 if (GET_CODE (r1) != GET_CODE (r2)
9974 || GET_MODE (r1) != GET_MODE (r2))
9975 return 0;
9976 if (GET_CODE (r1) == CONST_DOUBLE)
9977 {
9978 int format_len = strlen (GET_RTX_FORMAT (CONST_DOUBLE));
9979 int i;
9980 for (i = 1; i < format_len; i++)
9981 if (XWINT (r1, i) != XWINT (r2, i))
9982 return 0;
9983
9984 return 1;
9985 }
9986 else if (GET_CODE (r1) == LABEL_REF)
9987 return (CODE_LABEL_NUMBER (XEXP (r1, 0))
9988 == CODE_LABEL_NUMBER (XEXP (r2, 0)));
9989 else
9990 return rtx_equal_p (r1, r2);
9991 }
9992
/* Mark the hash table-entry HASH_ENTRY so the garbage collector keeps
   it (and its key rtx) alive.  Callback for htab_traverse.  */

static int
toc_hash_mark_entry (hash_slot, unused)
     void ** hash_slot;
     void * unused ATTRIBUTE_UNUSED;
{
  const struct toc_hash_struct * hash_entry = 
    *(const struct toc_hash_struct **) hash_slot;
  rtx r = hash_entry->key;
  /* Keep the entry structure itself.  */
  ggc_set_mark (hash_entry);
  /* For CODE_LABELS, we don't want to drag in the whole insn chain...  */
  if (GET_CODE (r) == LABEL_REF)
    {
      /* Mark only the LABEL_REF and its CODE_LABEL, not the chain.  */
      ggc_set_mark (r);
      ggc_set_mark (XEXP (r, 0));
    }
  else
    ggc_mark_rtx (r);
  /* Nonzero keeps htab_traverse walking the remaining slots.  */
  return 1;
}
10014
10015 /* Mark all the elements of the TOC hash-table *HT. */
10016
10017 static void
10018 toc_hash_mark_table (vht)
10019 void *vht;
10020 {
10021 htab_t *ht = vht;
10022
10023 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
10024 }
10025
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.

   Note: the macro now tests its own argument NAME; the previous
   definition referenced the caller's local variable `name' by
   capture, which only worked by accident.  */

#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
  || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
  || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
  || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
10036
/* Output a reference to symbol X to FILE, referencing the bare symbol
   for vtables so the linker never sees an undecided section.  */

void
rs6000_output_symbol_ref (file, x)
     FILE *file;
     rtx x;
{
  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  const char *name = XSTR (x, 0);

  /* NOTE(review): keep this local named `name' -- VTABLE_NAME_P has
     historically expanded in terms of a variable with that exact
     name.  */
  if (VTABLE_NAME_P (name)) 
    {
      RS6000_OUTPUT_BASENAME (file, name);
    }
  else
    assemble_name (file, name);
}
10057
/* Output a TOC entry.  We derive the entry name from what is being
   written.  X is the constant, LABELNO its "LC" label number, MODE
   its machine mode.  Duplicate entries are merged via the TOC hash
   table by emitting a ".set" alias instead of a second entry.  */

void
output_toc (file, x, labelno, mode)
     FILE *file;
     rtx x;
     int labelno;
     enum machine_mode mode;
{
  char buf[256];
  const char *name = buf;
  const char *real_name;
  rtx base = x;
  int offset = 0;

  if (TARGET_NO_TOC)
    abort ();

  /* When the linker won't eliminate them, don't output duplicate
     TOC entries (this happens on AIX if there is any kind of TOC,
     and on SVR4 under -fPIC or -mrelocatable).  */
  if (TARGET_TOC)
    {
      struct toc_hash_struct *h;
      void * * found;

      h = ggc_alloc (sizeof (*h));
      h->key = x;
      h->key_mode = mode;
      h->labelno = labelno;

      found = htab_find_slot (toc_hash_table, h, 1);
      if (*found == NULL)
	*found = h;
      else  /* This is indeed a duplicate.  
	       Set this label equal to that label.  */
	{
	  fputs ("\t.set ", file);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d,", labelno);
	  ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
	  fprintf (file, "%d\n", ((*(const struct toc_hash_struct **) 
				   found)->labelno));
	  return;
	}
    }

  /* If we're going to put a double constant in the TOC, make sure it's
     aligned properly when strict alignment is on.  */
  if (GET_CODE (x) == CONST_DOUBLE
      && STRICT_ALIGNMENT
      && GET_MODE_BITSIZE (mode) >= 64
      && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
    ASM_OUTPUT_ALIGN (file, 3);
  }

  ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);

  /* Handle FP constants specially.  Note that if we have a minimal
     TOC, things we put here aren't actually in the TOC, so we can allow
     FP constants.  */
  if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
    {
      REAL_VALUE_TYPE rv;
      long k[2];

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
	  fprintf (file, "0x%lx%08lx\n", k[0], k[1]);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
	  fprintf (file, "0x%lx,0x%lx\n", k[0], k[1]);
	  return;
	}
    }
  else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
    {
      REAL_VALUE_TYPE rv;
      long l;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l);
	  fprintf (file, "0x%lx00000000\n", l);
	  return;
	}
      else
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs ("\t.long ", file);
	  else
	    fprintf (file, "\t.tc FS_%lx[TC],", l);
	  fprintf (file, "0x%lx\n", l);
	  return;
	}
    }
  else if (GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      unsigned HOST_WIDE_INT low;
      HOST_WIDE_INT high;

      /* Split the integer constant into LOW/HIGH halves; a VOIDmode
	 CONST_DOUBLE carries a double-width integer, not a float.  */
      if (GET_CODE (x) == CONST_DOUBLE)
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}
      else
#if HOST_BITS_PER_WIDE_INT == 32
	{
	  low = INTVAL (x);
	  high = (low & 0x80000000) ? ~0 : 0;
	}
#else
	{
	  low = INTVAL (x) & 0xffffffff;
	  high = (HOST_WIDE_INT) INTVAL (x) >> 32;
	}
#endif

      /* TOC entries are always Pmode-sized, but since this
	 is a bigendian machine then if we're putting smaller
	 integer constants in the TOC we have to pad them.
	 (This is still a win over putting the constants in
	 a separate constant pool, because then we'd have
	 to have both a TOC entry _and_ the actual constant.)

	 For a 32-bit target, CONST_INT values are loaded and shifted
	 entirely within `low' and can be stored in one TOC entry.  */

      if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
	abort ();/* It would be easy to make this work, but it doesn't now.  */

      if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
	lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
		       POINTER_SIZE, &low, &high, 0);

      if (TARGET_64BIT)
	{
	  if (TARGET_MINIMAL_TOC)
	    fputs (DOUBLE_INT_ASM_OP, file);
	  else
	    fprintf (file, "\t.tc ID_%lx_%lx[TC],", (long) high, (long) low);
	  fprintf (file, "0x%lx%08lx\n", (long) high, (long) low);
	  return;
	}
      else
	{
	  if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc ID_%lx_%lx[TC],",
			 (long) high, (long) low);
	      fprintf (file, "0x%lx,0x%lx\n", (long) high, (long) low);
	    }
	  else
	    {
	      if (TARGET_MINIMAL_TOC)
		fputs ("\t.long ", file);
	      else
		fprintf (file, "\t.tc IS_%lx[TC],", (long) low);
	      fprintf (file, "0x%lx\n", (long) low);
	    }
	  return;
	}
    }

  /* A CONST is (symbol_or_label + offset); split it so the entry can
     be named after its base.  */
  if (GET_CODE (x) == CONST)
    {
      if (GET_CODE (XEXP (x, 0)) != PLUS)
	abort ();

      base = XEXP (XEXP (x, 0), 0);
      offset = INTVAL (XEXP (XEXP (x, 0), 1));
    }
  
  if (GET_CODE (base) == SYMBOL_REF)
    name = XSTR (base, 0);
  else if (GET_CODE (base) == LABEL_REF)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
  else if (GET_CODE (base) == CODE_LABEL)
    ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
  else
    abort ();

  STRIP_NAME_ENCODING (real_name, name);
  if (TARGET_MINIMAL_TOC)
    fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
  else
    {
      /* Encode a nonzero offset into the .tc entry name (.N for
	 negative, .P for positive) so distinct offsets get distinct
	 entries.  */
      fprintf (file, "\t.tc %s", real_name);

      if (offset < 0)
	fprintf (file, ".N%d", - offset);
      else if (offset)
	fprintf (file, ".P%d", offset);

      fputs ("[TC],", file);
    }

  /* Currently C++ toc references to vtables can be emitted before it
     is decided whether the vtable is public or private.  If this is
     the case, then the linker will eventually complain that there is
     a TOC reference to an unknown section.  Thus, for vtables only,
     we emit the TOC reference to reference the symbol and not the
     section.  */
  if (VTABLE_NAME_P (name))
    {
      RS6000_OUTPUT_BASENAME (file, name);
      if (offset < 0)
	fprintf (file, "%d", offset);
      else if (offset > 0)
	fprintf (file, "+%d", offset);
    }
  else
    output_addr_const (file, x);
  putc ('\n', file);
}
10298 \f
/* Output an assembler pseudo-op to write an ASCII string of N characters
   starting at P to FILE.

   On the RS/6000, we have to do this using the .byte operation and
   write out special characters outside the quoted string.
   Also, the assembler is broken; very long strings are truncated,
   so we must artificially break them up early.  */

void
output_ascii (file, p, n)
     FILE *file;
     const char *p;
     int n;
{
  const char *open_string = "\t.byte \"";
  const char *open_decimal = "\t.byte ";
  const char *pending_close = NULL;
  int in_string = 0;
  int i;
  char ch;

  for (i = 0; i < n; i++)
    {
      ch = *p++;
      if (ch >= ' ' && ch < 0177)
	{
	  /* Printable character: emit inside a quoted .byte string,
	     opening one if we are not already in a string.  */
	  if (open_string)
	    fputs (open_string, file);
	  putc (ch, file);

	  /* Write two quotes to get one.  */
	  if (ch == '"')
	    {
	      putc (ch, file);
	      ++in_string;
	    }

	  open_string = NULL;
	  open_decimal = "\"\n\t.byte ";
	  pending_close = "\"\n";
	  ++in_string;

	  /* The assembler truncates very long strings; break them up
	     well before that can happen.  */
	  if (in_string >= 512)
	    {
	      fputs (pending_close, file);

	      open_string = "\t.byte \"";
	      open_decimal = "\t.byte ";
	      pending_close = NULL;
	      in_string = 0;
	    }
	}
      else
	{
	  /* Unprintable character: emit as a decimal .byte value,
	     closing any open string first.  */
	  if (open_decimal)
	    fputs (open_decimal, file);
	  fprintf (file, "%d", ch);

	  open_string = "\n\t.byte \"";
	  open_decimal = ", ";
	  pending_close = "\n";
	  in_string = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (pending_close)
    fputs (pending_close, file);
}
10368 \f
/* Generate a unique section name for FILENAME for a section type
   represented by SECTION_DESC.  Output goes into BUF.

   SECTION_DESC can be any string, as long as it is different for each
   possible section type.

   We name the section in the same manner as xlc.  The name begins with an
   underscore followed by the filename (after stripping any leading directory
   names) with the last period replaced by the string SECTION_DESC.  If
   FILENAME does not contain a period, SECTION_DESC is appended to the end of
   the name.  */

void
rs6000_gen_section_name (buf, filename, section_desc)
     char **buf;
     const char *filename;
     const char *section_desc;
{
  const char *q, *after_last_slash, *last_period = 0;
  char *p;
  int len;

  /* Find the start of the base name and the last period within it.
     Reset LAST_PERIOD at each '/' so that a period inside a directory
     component (e.g. "dir.1/foo") is not mistaken for the filename's
     extension, which previously made SECTION_DESC disappear from the
     generated name.  */
  after_last_slash = filename;
  for (q = filename; *q; q++)
    {
      if (*q == '/')
	{
	  after_last_slash = q + 1;
	  last_period = 0;
	}
      else if (*q == '.')
	last_period = q;
    }

  len = strlen (after_last_slash) + strlen (section_desc) + 2;
  *buf = (char *) permalloc (len);

  p = *buf;
  *p++ = '_';

  /* Copy the alphanumeric characters of the base name, substituting
     SECTION_DESC at the position of the last period.  */
  for (q = after_last_slash; *q; q++)
    {
      if (q == last_period)
        {
	  strcpy (p, section_desc);
	  p += strlen (section_desc);
        }

      else if (ISALNUM (*q))
	*p++ = *q;
    }

  if (last_period == 0)
    strcpy (p, section_desc);
  else
    *p = '\0';
}
10423 \f
/* Emit profile function.  LABELNO is the number of the "LP" label
   associated with the current function.  Emits the RTL library call
   to the mcount routine for the ABIs that do profiling this way.  */

void
output_profile_hook (labelno)
     int labelno;
{
  if (DEFAULT_ABI == ABI_AIX)
    {
      char buf[30];
      const char *label_name;
      rtx fun;

      labelno += 1;

      /* Call mcount, passing the address of the "LP" label as the
	 single Pmode argument.  */
      ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
      STRIP_NAME_ENCODING (label_name, ggc_strdup (buf));
      fun = gen_rtx_SYMBOL_REF (Pmode, label_name);

      emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
                         fun, Pmode);
    }
  else if (DEFAULT_ABI == ABI_DARWIN)
    {
      const char *mcount_name = RS6000_MCOUNT;
      int caller_addr_regno = LINK_REGISTER_REGNUM;

      /* Be conservative and always set this, at least for now.  */
      current_function_uses_pic_offset_table = 1;

#if TARGET_MACHO
      /* For PIC code, set up a stub and collect the caller's address
	 from r0, which is where the prologue puts it.  */
      if (flag_pic)
	{
	  mcount_name = machopic_stub_name (mcount_name);
	  if (current_function_uses_pic_offset_table)
	    caller_addr_regno = 0;
	}
#endif
      /* Call mcount with the caller's address as argument.  */
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
			 0, VOIDmode, 1,
			 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
    }
}
10468
/* Write function profiler code.  Emits, as assembler text, the
   profiling prologue for the V.4 / AIX-nodesc ABIs: store the return
   address at 4(r1), materialize the "LP" label address into r0 (the
   method depends on -fpic/-fPIC), then call mcount.  AIX and Darwin
   profile via output_profile_hook instead.  */

void
output_function_profiler (file, labelno)
  FILE *file;
  int labelno;
{
  char buf[100];

  ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
  switch (DEFAULT_ABI)
    {
    default:
      abort ();

    case ABI_V4:
    case ABI_AIX_NODESC:
      fprintf (file, "\tmflr %s\n", reg_names[0]);
      if (flag_pic == 1)
	{
	  /* -fpic: fetch the label's address from the GOT via r12.  */
	  fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
	  asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@got(%s)\n", reg_names[12]);
	}
      else if (flag_pic > 1)
	{
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  /* Now, we need to get the address of the label.  */
	  fputs ("\tbl 1f\n\t.long ", file);
	  assemble_name (file, buf);
	  fputs ("-.\n1:", file);
	  asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
	  asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n", 
		       reg_names[0], reg_names[11]);
	  asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
		       reg_names[0], reg_names[0], reg_names[11]);
	}
      else
	{
	  /* Non-PIC: build the label address with lis/la via r12.  */
	  asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
	  assemble_name (file, buf);
	  fputs ("@ha\n", file);
	  asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
		       reg_names[0], reg_names[1]);
	  asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
	  assemble_name (file, buf);
	  asm_fprintf (file, "@l(%s)\n", reg_names[12]);
	}

      /* Preserve the static chain register across the mcount call,
	 parking it in r30.  */
      if (current_function_needs_context)
	asm_fprintf (file, "\tmr %s,%s\n",
		     reg_names[30], reg_names[STATIC_CHAIN_REGNUM]);
      fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
      if (current_function_needs_context)
	asm_fprintf (file, "\tmr %s,%s\n",
		     reg_names[STATIC_CHAIN_REGNUM], reg_names[30]);
      break;

    case ABI_AIX:
    case ABI_DARWIN:
      /* Don't do anything, done in output_profile_hook ().  */
      break;

    }
}
10539
10540 /* Adjust the cost of a scheduling dependency. Return the new cost of
10541 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
10542
10543 static int
10544 rs6000_adjust_cost (insn, link, dep_insn, cost)
10545 rtx insn;
10546 rtx link;
10547 rtx dep_insn ATTRIBUTE_UNUSED;
10548 int cost;
10549 {
10550 if (! recog_memoized (insn))
10551 return 0;
10552
10553 if (REG_NOTE_KIND (link) != 0)
10554 return 0;
10555
10556 if (REG_NOTE_KIND (link) == 0)
10557 {
10558 /* Data dependency; DEP_INSN writes a register that INSN reads
10559 some cycles later. */
10560 switch (get_attr_type (insn))
10561 {
10562 case TYPE_JMPREG:
10563 /* Tell the first scheduling pass about the latency between
10564 a mtctr and bctr (and mtlr and br/blr). The first
10565 scheduling pass will not know about this latency since
10566 the mtctr instruction, which has the latency associated
10567 to it, will be generated by reload. */
10568 return TARGET_POWER ? 5 : 4;
10569 case TYPE_BRANCH:
10570 /* Leave some extra cycles between a compare and its
10571 dependent branch, to inhibit expensive mispredicts. */
10572 if ((rs6000_cpu_attr == CPU_PPC750
10573 || rs6000_cpu_attr == CPU_PPC7400
10574 || rs6000_cpu_attr == CPU_PPC7450)
10575 && recog_memoized (dep_insn)
10576 && (INSN_CODE (dep_insn) >= 0)
10577 && (get_attr_type (dep_insn) == TYPE_COMPARE
10578 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
10579 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
10580 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
10581 return cost + 2;
10582 default:
10583 break;
10584 }
10585 /* Fall out to return default cost. */
10586 }
10587
10588 return cost;
10589 }
10590
/* A C statement (sans semicolon) to update the integer scheduling
   priority INSN_PRIORITY (INSN).  Reduce the priority to execute the
   INSN earlier, increase the priority to execute INSN later.  Do not
   define this macro if you do not need to adjust the scheduling
   priorities of insns.  */

static int
rs6000_adjust_priority (insn, priority)
     rtx insn ATTRIBUTE_UNUSED;
     int priority;
{
  /* On machines (like the 750) which have asymmetric integer units,
     where one integer unit can do multiply and divides and the other
     can't, reduce the priority of multiply/divide so it is scheduled
     before other integer operations.  */

  /* The experiment below is disabled; the function currently returns
     PRIORITY unchanged.  */
#if 0
  if (! INSN_P (insn))
    return priority;

  if (GET_CODE (PATTERN (insn)) == USE)
    return priority;

  switch (rs6000_cpu_attr) {
  case CPU_PPC750:
    switch (get_attr_type (insn))
      {
      default:
	break;

      case TYPE_IMUL:
      case TYPE_IDIV:
	fprintf (stderr, "priority was %#x (%d) before adjustment\n",
		 priority, priority);
	if (priority >= 0 && priority < 0x01000000)
	  priority >>= 3;
	break;
      }
  }
#endif

  return priority;
}
10634
10635 /* Return how many instructions the machine can issue per cycle. */
10636
10637 static int
10638 rs6000_issue_rate ()
10639 {
10640 switch (rs6000_cpu_attr) {
10641 case CPU_RIOS1: /* ? */
10642 case CPU_RS64A:
10643 case CPU_PPC601: /* ? */
10644 case CPU_PPC7450:
10645 return 3;
10646 case CPU_PPC603:
10647 case CPU_PPC750:
10648 case CPU_PPC7400:
10649 return 2;
10650 case CPU_RIOS2:
10651 case CPU_PPC604:
10652 case CPU_PPC604E:
10653 case CPU_PPC620:
10654 case CPU_PPC630:
10655 return 4;
10656 default:
10657 return 1;
10658 }
10659 }
10660
10661 \f
10662 /* Length in units of the trampoline for entering a nested function. */
10663
10664 int
10665 rs6000_trampoline_size ()
10666 {
10667 int ret = 0;
10668
10669 switch (DEFAULT_ABI)
10670 {
10671 default:
10672 abort ();
10673
10674 case ABI_AIX:
10675 ret = (TARGET_32BIT) ? 12 : 24;
10676 break;
10677
10678 case ABI_DARWIN:
10679 case ABI_V4:
10680 case ABI_AIX_NODESC:
10681 ret = (TARGET_32BIT) ? 40 : 48;
10682 break;
10683 }
10684
10685 return ret;
10686 }
10687
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.  */

void
rs6000_initialize_trampoline (addr, fnaddr, cxt)
     rtx addr;
     rtx fnaddr;
     rtx cxt;
{
  enum machine_mode pmode = Pmode;
  int regsize = (TARGET_32BIT) ? 4 : 8;	/* width of one descriptor word */
  rtx ctx_reg = force_reg (pmode, cxt);

  switch (DEFAULT_ABI)
    {
    default:
      abort ();

/* Macros to shorten the code expansions below.  */
#define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
#define MEM_PLUS(addr,offset) \
  gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))

    /* Under AIX, just build the 3 word function descriptor */
    case ABI_AIX:
      {
	rtx fn_reg = gen_reg_rtx (pmode);
	rtx toc_reg = gen_reg_rtx (pmode);
	/* Copy the callee's code address and TOC pointer from its
	   descriptor, then store the static chain as the third word.  */
	emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
	emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
	emit_move_insn (MEM_DEREF (addr), fn_reg);
	emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
	emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
      }
      break;

    /* Under V.4/eabi/darwin, __trampoline_setup does the real work.  */
    case ABI_DARWIN:
    case ABI_V4:
    case ABI_AIX_NODESC:
      emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
			 FALSE, VOIDmode, 4,
			 addr, pmode,
			 GEN_INT (rs6000_trampoline_size ()), SImode,
			 fnaddr, pmode,
			 ctx_reg, pmode);
      break;
    }

  return;
}
10740
10741 \f
/* Table of valid machine attributes.  */

const struct attribute_spec rs6000_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
  /* Sentinel entry terminating the table.  */
  { NULL,       0, 0, false, false, false, NULL }
};
10750
10751 /* Handle a "longcall" attribute; arguments as in struct
10752 attribute_spec.handler. */
10753
10754 static tree
10755 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
10756 tree *node;
10757 tree name;
10758 tree args ATTRIBUTE_UNUSED;
10759 int flags ATTRIBUTE_UNUSED;
10760 bool *no_add_attrs;
10761 {
10762 if (TREE_CODE (*node) != FUNCTION_TYPE
10763 && TREE_CODE (*node) != FIELD_DECL
10764 && TREE_CODE (*node) != TYPE_DECL)
10765 {
10766 warning ("`%s' attribute only applies to functions",
10767 IDENTIFIER_POINTER (name));
10768 *no_add_attrs = true;
10769 }
10770
10771 return NULL_TREE;
10772 }
10773
10774 /* Return a reference suitable for calling a function with the
10775 longcall attribute. */
10776
10777 struct rtx_def *
10778 rs6000_longcall_ref (call_ref)
10779 rtx call_ref;
10780 {
10781 const char *call_name;
10782 tree node;
10783
10784 if (GET_CODE (call_ref) != SYMBOL_REF)
10785 return call_ref;
10786
10787 /* System V adds '.' to the internal name, so skip them. */
10788 call_name = XSTR (call_ref, 0);
10789 if (*call_name == '.')
10790 {
10791 while (*call_name == '.')
10792 call_name++;
10793
10794 node = get_identifier (call_name);
10795 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
10796 }
10797
10798 return force_reg (Pmode, call_ref);
10799 }
10800
10801 \f
10802 /* A C statement or statements to switch to the appropriate section
10803 for output of RTX in mode MODE. You can assume that RTX is some
10804 kind of constant in RTL. The argument MODE is redundant except in
10805 the case of a `const_int' rtx. Select the section by calling
10806 `text_section' or one of the alternatives for other sections.
10807
10808 Do not define this macro if you put all constants in the read-only
10809 data section. */
10810
10811 #ifdef USING_ELFOS_H
10812
10813 void
10814 rs6000_select_rtx_section (mode, x)
10815 enum machine_mode mode;
10816 rtx x;
10817 {
10818 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
10819 toc_section ();
10820 else
10821 const_section ();
10822 }
10823
/* A C statement or statements to switch to the appropriate
   section for output of DECL.  DECL is either a `VAR_DECL' node
   or a constant of some sort.  RELOC indicates whether forming
   the initial value of DECL requires link-time relocations.  */

void
rs6000_select_section (decl, reloc)
     tree decl;
     int reloc;
{
  int size = int_size_in_bytes (TREE_TYPE (decl));
  int needs_sdata;
  int readonly;
  /* Section switchers, indexed by (readonly ? 0 : 2) + (sdata ? 1 : 0).  */
  static void (* const sec_funcs[4]) PARAMS ((void)) = {
    &const_section,
    &sdata2_section,
    &data_section,
    &sdata_section
  };
  
  /* Small objects go in small data; under -msdata=data only public
     objects qualify.  */
  needs_sdata = (size > 0 
		 && size <= g_switch_value
		 && rs6000_sdata != SDATA_NONE
		 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));

  /* Decide whether DECL can live in a read-only section.  */
  if (TREE_CODE (decl) == STRING_CST)
    readonly = ! flag_writable_strings;
  else if (TREE_CODE (decl) == VAR_DECL)
    readonly = (! (flag_pic && reloc)
		&& TREE_READONLY (decl)
		&& ! TREE_SIDE_EFFECTS (decl)
		&& DECL_INITIAL (decl)
		&& DECL_INITIAL (decl) != error_mark_node
		&& TREE_CONSTANT (DECL_INITIAL (decl)));
  else if (TREE_CODE (decl) == CONSTRUCTOR)
    readonly = (! (flag_pic && reloc)
		&& ! TREE_SIDE_EFFECTS (decl)
		&& TREE_CONSTANT (decl));
  else
    readonly = 1;
  /* Only EABI has a read-only small-data section (.sdata2).  */
  if (needs_sdata && rs6000_sdata != SDATA_EABI)
    readonly = 0;

  (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
}
10869
10870 /* A C statement to build up a unique section name, expressed as a
10871 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
10872 RELOC indicates whether the initial value of EXP requires
10873 link-time relocations. If you do not define this macro, GCC will use
10874 the symbol name prefixed by `.' as the section name. Note - this
10875 macro can now be called for uninitialized data items as well as
10876 initialised data and functions. */
10877
10878 void
10879 rs6000_unique_section (decl, reloc)
10880 tree decl;
10881 int reloc;
10882 {
10883 int len;
10884 int sec;
10885 const char *name;
10886 char *string;
10887 const char *prefix;
10888
10889 static const char *const prefixes[7][2] =
10890 {
10891 { ".rodata.", ".gnu.linkonce.r." },
10892 { ".sdata2.", ".gnu.linkonce.s2." },
10893 { ".data.", ".gnu.linkonce.d." },
10894 { ".sdata.", ".gnu.linkonce.s." },
10895 { ".bss.", ".gnu.linkonce.b." },
10896 { ".sbss.", ".gnu.linkonce.sb." },
10897 { ".text.", ".gnu.linkonce.t." }
10898 };
10899
10900 if (TREE_CODE (decl) == FUNCTION_DECL)
10901 sec = 6;
10902 else
10903 {
10904 int readonly;
10905 int needs_sdata;
10906 int size;
10907
10908 readonly = 1;
10909 if (TREE_CODE (decl) == STRING_CST)
10910 readonly = ! flag_writable_strings;
10911 else if (TREE_CODE (decl) == VAR_DECL)
10912 readonly = (! (flag_pic && reloc)
10913 && TREE_READONLY (decl)
10914 && ! TREE_SIDE_EFFECTS (decl)
10915 && TREE_CONSTANT (DECL_INITIAL (decl)));
10916
10917 size = int_size_in_bytes (TREE_TYPE (decl));
10918 needs_sdata = (size > 0
10919 && size <= g_switch_value
10920 && rs6000_sdata != SDATA_NONE
10921 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10922
10923 if (DECL_INITIAL (decl) == 0
10924 || DECL_INITIAL (decl) == error_mark_node)
10925 sec = 4;
10926 else if (! readonly)
10927 sec = 2;
10928 else
10929 sec = 0;
10930
10931 if (needs_sdata)
10932 {
10933 /* .sdata2 is only for EABI. */
10934 if (sec == 0 && rs6000_sdata != SDATA_EABI)
10935 sec = 2;
10936 sec += 1;
10937 }
10938 }
10939
10940 STRIP_NAME_ENCODING (name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
10941 prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
10942 len = strlen (name) + strlen (prefix);
10943 string = alloca (len + 1);
10944
10945 sprintf (string, "%s%s", prefix, name);
10946
10947 DECL_SECTION_NAME (decl) = build_string (len, string);
10948 }
10949
10950 \f
/* If we are referencing a function that is static or is known to be
   in this file, make the SYMBOL_REF special.  We can use this to indicate
   that we can branch to this function without emitting a no-op after the
   call.  For real AIX calling sequences, we also replace the
   function name with the real name (1 or 2 leading .'s), rather than
   the function descriptor name.  This saves a lot of overriding code
   to read the prefixes.  */

void
rs6000_encode_section_info (decl)
     tree decl;
{
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      rtx sym_ref = XEXP (DECL_RTL (decl), 0);
      /* Functions already written out, or not public, are known to
	 live in this file (unless weak), so flag them.  */
      if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
	  && ! DECL_WEAK (decl))
	SYMBOL_REF_FLAG (sym_ref) = 1;

      if (DEFAULT_ABI == ABI_AIX)
	{
	  /* NOTE(review): inside this branch DEFAULT_ABI == ABI_AIX
	     always holds, so LEN1 is always 1 and the second '.'
	     stored below is immediately overwritten by the memcpy;
	     the ternary looks like a remnant of a two-dot variant.  */
	  size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
	  size_t len2 = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len1 + len2 + 1);
	  str[0] = '.';
	  str[1] = '.';
	  memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);

	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
	}
    }
  else if (rs6000_sdata != SDATA_NONE
	   && DEFAULT_ABI == ABI_V4
	   && TREE_CODE (decl) == VAR_DECL)
    {
      int size = int_size_in_bytes (TREE_TYPE (decl));
      tree section_name = DECL_SECTION_NAME (decl);
      const char *name = (char *)0;
      int len = 0;

      if (section_name)
	{
	  if (TREE_CODE (section_name) == STRING_CST)
	    {
	      name = TREE_STRING_POINTER (section_name);
	      len = TREE_STRING_LENGTH (section_name);
	    }
	  else
	    abort ();
	}

      /* Variables that are small or explicitly placed in one of the
	 small-data sections get a '@' prefix on their symbol --
	 presumably recognized later by the output machinery; TODO
	 confirm the consumer.  */
      if ((size > 0 && size <= g_switch_value)
	  || (name
	      && ((len == sizeof (".sdata") - 1
		   && strcmp (name, ".sdata") == 0)
		  || (len == sizeof (".sdata2") - 1
		      && strcmp (name, ".sdata2") == 0)
		  || (len == sizeof (".sbss") - 1
		      && strcmp (name, ".sbss") == 0)
		  || (len == sizeof (".sbss2") - 1
		      && strcmp (name, ".sbss2") == 0)
		  || (len == sizeof (".PPC.EMB.sdata0") - 1
		      && strcmp (name, ".PPC.EMB.sdata0") == 0)
		  || (len == sizeof (".PPC.EMB.sbss0") - 1
		      && strcmp (name, ".PPC.EMB.sbss0") == 0))))
	{
	  rtx sym_ref = XEXP (DECL_RTL (decl), 0);
	  size_t len = strlen (XSTR (sym_ref, 0));
	  char *str = alloca (len + 2);

	  str[0] = '@';
	  memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
	  XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
	}
    }
}
11027
11028 #endif /* USING_ELFOS_H */
11029
11030 \f
11031 /* Return a REG that occurs in ADDR with coefficient 1.
11032 ADDR can be effectively incremented by incrementing REG.
11033
11034 r0 is special and we must not select it as an address
11035 register by this routine since our caller will try to
11036 increment the returned register via an "la" instruction. */
11037
11038 struct rtx_def *
11039 find_addr_reg (addr)
11040 rtx addr;
11041 {
11042 while (GET_CODE (addr) == PLUS)
11043 {
11044 if (GET_CODE (XEXP (addr, 0)) == REG
11045 && REGNO (XEXP (addr, 0)) != 0)
11046 addr = XEXP (addr, 0);
11047 else if (GET_CODE (XEXP (addr, 1)) == REG
11048 && REGNO (XEXP (addr, 1)) != 0)
11049 addr = XEXP (addr, 1);
11050 else if (CONSTANT_P (XEXP (addr, 0)))
11051 addr = XEXP (addr, 1);
11052 else if (CONSTANT_P (XEXP (addr, 1)))
11053 addr = XEXP (addr, 0);
11054 else
11055 abort ();
11056 }
11057 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
11058 return addr;
11059 abort ();
11060 }
11061
/* Report insn OP as containing an ill-formed address; aborts
   compilation via fatal_insn.  */
void
rs6000_fatal_bad_address (op)
  rtx op;
{
  fatal_insn ("bad address", op);
}
11068
/* Called to register all of our global variables with the garbage
   collector.  Also creates the TOC hash table, registering it with a
   custom mark routine so only live entries survive collection.  */

static void
rs6000_add_gc_roots ()
{
  ggc_add_rtx_root (&rs6000_compare_op0, 1);
  ggc_add_rtx_root (&rs6000_compare_op1, 1);

  toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
  ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table), 
		toc_hash_mark_table);

#if TARGET_MACHO
  machopic_add_gc_roots ();
#endif
}
11086
11087 #if TARGET_MACHO
11088
11089 #if 0
/* Returns 1 if OP is either a symbol reference or a sum of a symbol
   reference and a constant.  (Compiled out by the surrounding #if 0.)  */

int
symbolic_operand (op)
     rtx op;
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;
    case CONST:
      op = XEXP (op, 0);
      /* NOTE(review): `&&' binds tighter than `||' here, so this
	 accepts a bare SYMBOL_REF or (SYMBOL_REF|LABEL_REF)+CONST_INT;
	 explicit parentheses would make that intent clearer if this
	 code is ever re-enabled.  */
      return (GET_CODE (op) == SYMBOL_REF ||
	      (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
    default:
      return 0;
    }
}
11112 #endif
11113
11114 #ifdef RS6000_LONG_BRANCH
11115
11116 static tree stub_list = 0;
11117
11118 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
11119 procedure calls to the linked list. */
11120
11121 void
11122 add_compiler_stub (label_name, function_name, line_number)
11123 tree label_name;
11124 tree function_name;
11125 int line_number;
11126 {
11127 tree stub = build_tree_list (function_name, label_name);
11128 TREE_TYPE (stub) = build_int_2 (line_number, 0);
11129 TREE_CHAIN (stub) = stub_list;
11130 stub_list = stub;
11131 }
11132
/* Accessors for the TREE_LIST stub nodes chained on stub_list: the
   stub's label is in TREE_VALUE, the callee's name in TREE_PURPOSE,
   and the source line number in the node's TREE_TYPE.  */
#define STUB_LABEL_NAME(STUB)     TREE_VALUE (STUB)
#define STUB_FUNCTION_NAME(STUB)  TREE_PURPOSE (STUB)
#define STUB_LINE_NUMBER(STUB)    TREE_INT_CST_LOW (TREE_TYPE (STUB))
11136
11137 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
11138 handling procedure calls from the linked list and initializes the
11139 linked list. */
11140
11141 void
11142 output_compiler_stub ()
11143 {
11144 char tmp_buf[256];
11145 char label_buf[256];
11146 char *label;
11147 tree tmp_stub, stub;
11148
11149 if (!flag_pic)
11150 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11151 {
11152 fprintf (asm_out_file,
11153 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
11154
11155 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11156 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11157 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
11158 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11159
11160 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
11161 strcpy (label_buf,
11162 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
11163 else
11164 {
11165 label_buf[0] = '_';
11166 strcpy (label_buf+1,
11167 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
11168 }
11169
11170 strcpy (tmp_buf, "lis r12,hi16(");
11171 strcat (tmp_buf, label_buf);
11172 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
11173 strcat (tmp_buf, label_buf);
11174 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
11175 output_asm_insn (tmp_buf, 0);
11176
11177 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11178 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11179 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
11180 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11181 }
11182
11183 stub_list = 0;
11184 }
11185
11186 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
11187 already there or not. */
11188
11189 int
11190 no_previous_def (function_name)
11191 tree function_name;
11192 {
11193 tree stub;
11194 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11195 if (function_name == STUB_FUNCTION_NAME (stub))
11196 return 0;
11197 return 1;
11198 }
11199
11200 /* GET_PREV_LABEL gets the label name from the previous definition of
11201 the function. */
11202
11203 tree
11204 get_prev_label (function_name)
11205 tree function_name;
11206 {
11207 tree stub;
11208 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11209 if (function_name == STUB_FUNCTION_NAME (stub))
11210 return STUB_LABEL_NAME (stub);
11211 return 0;
11212 }
11213
11214 /* INSN is either a function call or a millicode call. It may have an
11215 unconditional jump in its delay slot.
11216
11217 CALL_DEST is the routine we are calling. */
11218
11219 char *
11220 output_call (insn, call_dest, operand_number)
11221 rtx insn;
11222 rtx call_dest;
11223 int operand_number;
11224 {
11225 static char buf[256];
11226 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
11227 {
11228 tree labelname;
11229 tree funname = get_identifier (XSTR (call_dest, 0));
11230
11231 if (no_previous_def (funname))
11232 {
11233 int line_number;
11234 rtx label_rtx = gen_label_rtx ();
11235 char *label_buf, temp_buf[256];
11236 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
11237 CODE_LABEL_NUMBER (label_rtx));
11238 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
11239 labelname = get_identifier (label_buf);
11240 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
11241 if (insn)
11242 line_number = NOTE_LINE_NUMBER (insn);
11243 add_compiler_stub (labelname, funname, line_number);
11244 }
11245 else
11246 labelname = get_prev_label (funname);
11247
11248 sprintf (buf, "jbsr %%z%d,%.246s",
11249 operand_number, IDENTIFIER_POINTER (labelname));
11250 return buf;
11251 }
11252 else
11253 {
11254 sprintf (buf, "bl %%z%d", operand_number);
11255 return buf;
11256 }
11257 }
11258
11259 #endif /* RS6000_LONG_BRANCH */
11260
/* Build in BUF the local label "L<N>$<SYMBOL>", preserving or adding
   quoting as needed.  LENGTH is unused; the caller sizes BUF itself.
   NOTE(review): in the first branch no closing quote is appended —
   presumably SYMBOL's own trailing '"' (copied via symbol_+1) closes
   the name; confirm against the symbol encoding used by callers.  */
#define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N)		\
  do {								\
    const char *const symbol_ = (SYMBOL);			\
    char *buffer_ = (BUF);					\
    if (symbol_[0] == '"')					\
      {								\
	sprintf(buffer_, "\"L%d$%s", (N), symbol_+1);		\
      }								\
    else if (name_needs_quotes(symbol_))			\
      {								\
	sprintf(buffer_, "\"L%d$%s\"", (N), symbol_);		\
      }								\
    else							\
      {								\
	sprintf(buffer_, "L%d$%s", (N), symbol_);		\
      }								\
  } while (0)
11278
11279
11280 /* Generate PIC and indirect symbol stubs. */
11281
/* Generate PIC and indirect symbol stubs.  Emits to FILE the stub
   named STUB for symbol SYMB, followed by its lazy pointer entry.  */

void
machopic_output_stub (file, symb, stub)
     FILE *file;
     const char *symb, *stub;
{
  unsigned int length;
  char *symbol_name, *lazy_ptr_name;
  char *local_label_0;
  static int label = 0;  /* Counter keeping the local labels unique.  */

  /* Lose our funky encoding stuff so it doesn't contaminate the stub.  */
  STRIP_NAME_ENCODING (symb, symb);

  label += 1;

  /* The +32 leaves room for the decoration (prefix, label number,
     quotes) that the GEN_* macros add around SYMB.  */
  length = strlen (symb);
  symbol_name = alloca (length + 32);
  GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);

  lazy_ptr_name = alloca (length + 32);
  GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);

  local_label_0 = alloca (length + 32);
  GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);

  if (flag_pic == 2)
    machopic_picsymbol_stub_section ();
  else
    machopic_symbol_stub_section ();

  fprintf (file, "%s:\n", stub);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);

  if (flag_pic == 2)
    {
      /* PIC stub: bcl sets LR to the next instruction, giving r11
	 the current PC; address the lazy pointer relative to it,
	 load the target into r12, and branch through CTR.
	 NOTE(review): the final addi leaves r11 pointing at the lazy
	 pointer — presumably for dyld_stub_binding_helper; confirm.  */
      fprintf (file, "\tmflr r0\n");
      fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
      fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
      fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtlr r0\n");
      fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tmtctr r12\n");
      fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
	       lazy_ptr_name, local_label_0);
      fprintf (file, "\tbctr\n");
    }
  else
    /* Non-PIC stubs are not implemented; this text goes straight
       into the assembly output.  */
    fprintf (file, "non-pure not supported\n");

  /* Emit the lazy pointer, initially bound to the dyld helper.  */
  machopic_lazy_symbol_ptr_section ();
  fprintf (file, "%s:\n", lazy_ptr_name);
  fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
  fprintf (file, "\t.long dyld_stub_binding_helper\n");
}
11338
11339 /* Legitimize PIC addresses. If the address is already
11340 position-independent, we return ORIG. Newly generated
11341 position-independent addresses go into a reg. This is REG if non
11342 zero, otherwise we allocate register(s) as necessary. */
11343
11344 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
11345
rtx
rs6000_machopic_legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  rtx base, offset;

  /* Allocate a scratch register unless reload forbids new pseudos.  */
  if (reg == NULL && ! reload_in_progress && ! reload_completed)
    reg = gen_reg_rtx (Pmode);

  if (GET_CODE (orig) == CONST)
    {
      /* Already relative to the PIC base — nothing to do.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  /* Legitimize each side of the sum separately.
	     NOTE(review): both recursive calls share REG; this relies
	     on the second call not clobbering the first's result.  */
	  base =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
						    Pmode, reg);
	  offset =
	    rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
						    Pmode, reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  if (SMALL_INT (offset))
	    /* The offset fits a 16-bit displacement.  */
	    return plus_constant (base, INTVAL (offset));
	  else if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    {
	      /* During/after reload we cannot force a register, so
		 fall back to a constant-pool reference.  */
 	      rtx mem = force_const_mem (Pmode, orig);
	      return machopic_legitimize_pic_address (mem, Pmode, reg);
	    }
	}
      return gen_rtx (PLUS, Pmode, base, offset);
    }

  /* Fall back on generic machopic code.  */
  return machopic_legitimize_pic_address (orig, mode, reg);
}
11393
11394 /* This is just a placeholder to make linking work without having to
11395 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
11396 ever needed for Darwin (not too likely!) this would have to get a
11397 real definition. */
11398
void
toc_section ()
{
  /* Deliberately empty: Darwin has no TOC, this exists only to
     satisfy the linker (see the comment above).  */
}
11403
11404 #endif /* TARGET_MACHO */
11405
11406 #if TARGET_ELF
11407 static unsigned int
11408 rs6000_elf_section_type_flags (decl, name, reloc)
11409 tree decl;
11410 const char *name;
11411 int reloc;
11412 {
11413 unsigned int flags = default_section_type_flags (decl, name, reloc);
11414
11415 if (TARGET_RELOCATABLE)
11416 flags |= SECTION_WRITE;
11417
11418 return flags;
11419 }
11420
11421 /* Record an element in the table of global constructors. SYMBOL is
11422 a SYMBOL_REF of the function to be called; PRIORITY is a number
11423 between 0 and MAX_INIT_PRIORITY.
11424
11425 This differs from default_named_section_asm_out_constructor in
11426 that we have special handling for -mrelocatable. */
11427
11428 static void
11429 rs6000_elf_asm_out_constructor (symbol, priority)
11430 rtx symbol;
11431 int priority;
11432 {
11433 const char *section = ".ctors";
11434 char buf[16];
11435
11436 if (priority != DEFAULT_INIT_PRIORITY)
11437 {
11438 sprintf (buf, ".ctors.%.5u",
11439 /* Invert the numbering so the linker puts us in the proper
11440 order; constructors are run from right to left, and the
11441 linker sorts in increasing order. */
11442 MAX_INIT_PRIORITY - priority);
11443 section = buf;
11444 }
11445
11446 named_section_flags (section, SECTION_WRITE);
11447 assemble_align (POINTER_SIZE);
11448
11449 if (TARGET_RELOCATABLE)
11450 {
11451 fputs ("\t.long (", asm_out_file);
11452 output_addr_const (asm_out_file, symbol);
11453 fputs (")@fixup\n", asm_out_file);
11454 }
11455 else
11456 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11457 }
11458
11459 static void
11460 rs6000_elf_asm_out_destructor (symbol, priority)
11461 rtx symbol;
11462 int priority;
11463 {
11464 const char *section = ".dtors";
11465 char buf[16];
11466
11467 if (priority != DEFAULT_INIT_PRIORITY)
11468 {
11469 sprintf (buf, ".dtors.%.5u",
11470 /* Invert the numbering so the linker puts us in the proper
11471 order; constructors are run from right to left, and the
11472 linker sorts in increasing order. */
11473 MAX_INIT_PRIORITY - priority);
11474 section = buf;
11475 }
11476
11477 named_section_flags (section, SECTION_WRITE);
11478 assemble_align (POINTER_SIZE);
11479
11480 if (TARGET_RELOCATABLE)
11481 {
11482 fputs ("\t.long (", asm_out_file);
11483 output_addr_const (asm_out_file, symbol);
11484 fputs (")@fixup\n", asm_out_file);
11485 }
11486 else
11487 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11488 }
11489 #endif
11490
11491 #ifdef OBJECT_FORMAT_COFF
/* Switch the assembler output to section NAME by emitting an XCOFF
   .csect directive.  FLAGS is ignored; no attributes are emitted.  */
static void
xcoff_asm_named_section (name, flags)
     const char *name;
     unsigned int flags ATTRIBUTE_UNUSED;
{
  fprintf (asm_out_file, "\t.csect %s\n", name);
}
11499 #endif