1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
/* Provide a zero fallback so TARGET_NO_PROTOTYPE can be tested
   unconditionally on targets that do not define it.  */
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
61 enum processor_type rs6000_cpu
;
62 struct rs6000_cpu_select rs6000_select
[3] =
64 /* switch name, tune arch */
65 { (const char *)0, "--with-cpu=", 1, 1 },
66 { (const char *)0, "-mcpu=", 1, 1 },
67 { (const char *)0, "-mtune=", 1, 0 },
70 /* Size of long double */
71 const char *rs6000_long_double_size_string
;
72 int rs6000_long_double_type_size
;
74 /* Whether -mabi=altivec has appeared */
75 int rs6000_altivec_abi
;
77 /* Set to non-zero once AIX common-mode calls have been defined. */
78 static int common_mode_defined
;
80 /* Save information from a "cmpxx" operation until the branch or scc is
82 rtx rs6000_compare_op0
, rs6000_compare_op1
;
83 int rs6000_compare_fp_p
;
85 /* Label number of label created for -mrelocatable, to call to so we can
86 get the address of the GOT section */
87 int rs6000_pic_labelno
;
90 /* Which abi to adhere to */
91 const char *rs6000_abi_name
= RS6000_ABI_NAME
;
93 /* Semantics of the small data area */
94 enum rs6000_sdata_type rs6000_sdata
= SDATA_DATA
;
96 /* Which small data model to use */
97 const char *rs6000_sdata_name
= (char *)0;
99 /* Counter for labels which are to be placed in .fixup. */
100 int fixuplabelno
= 0;
103 /* ABI enumeration available for subtarget to use. */
104 enum rs6000_abi rs6000_current_abi
;
106 /* ABI string from -mabi= option. */
107 const char *rs6000_abi_string
;
110 const char *rs6000_debug_name
;
111 int rs6000_debug_stack
; /* debug stack applications */
112 int rs6000_debug_arg
; /* debug argument handling */
114 /* Flag to say the TOC is initialized */
116 char toc_label_name
[10];
118 /* Alias set for saves and restores from the rs6000 stack. */
119 static int rs6000_sr_alias_set
;
121 static void rs6000_add_gc_roots
PARAMS ((void));
122 static int num_insns_constant_wide
PARAMS ((HOST_WIDE_INT
));
123 static rtx expand_block_move_mem
PARAMS ((enum machine_mode
, rtx
, rtx
));
124 static void validate_condition_mode
125 PARAMS ((enum rtx_code
, enum machine_mode
));
126 static rtx rs6000_generate_compare
PARAMS ((enum rtx_code
));
127 static void rs6000_maybe_dead
PARAMS ((rtx
));
128 static void rs6000_emit_stack_tie
PARAMS ((void));
129 static void rs6000_frame_related
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
, rtx
, rtx
));
130 static void rs6000_emit_allocate_stack
PARAMS ((HOST_WIDE_INT
, int));
131 static unsigned rs6000_hash_constant
PARAMS ((rtx
));
132 static unsigned toc_hash_function
PARAMS ((const void *));
133 static int toc_hash_eq
PARAMS ((const void *, const void *));
134 static int toc_hash_mark_entry
PARAMS ((void **, void *));
135 static void toc_hash_mark_table
PARAMS ((void *));
136 static int constant_pool_expr_1
PARAMS ((rtx
, int *, int *));
137 static void rs6000_free_machine_status
PARAMS ((struct function
*));
138 static void rs6000_init_machine_status
PARAMS ((struct function
*));
139 static bool rs6000_assemble_integer
PARAMS ((rtx
, unsigned int, int));
140 static int rs6000_ra_ever_killed
PARAMS ((void));
141 static tree rs6000_handle_longcall_attribute
PARAMS ((tree
*, tree
, tree
, int, bool *));
142 const struct attribute_spec rs6000_attribute_table
[];
143 static void rs6000_output_function_prologue
PARAMS ((FILE *, HOST_WIDE_INT
));
144 static void rs6000_output_function_epilogue
PARAMS ((FILE *, HOST_WIDE_INT
));
145 static rtx rs6000_emit_set_long_const
PARAMS ((rtx
,
146 HOST_WIDE_INT
, HOST_WIDE_INT
));
148 static unsigned int rs6000_elf_section_type_flags
PARAMS ((tree
, const char *,
150 static void rs6000_elf_asm_out_constructor
PARAMS ((rtx
, int));
151 static void rs6000_elf_asm_out_destructor
PARAMS ((rtx
, int));
153 #ifdef OBJECT_FORMAT_COFF
154 static void xcoff_asm_named_section
PARAMS ((const char *, unsigned int));
156 static int rs6000_adjust_cost
PARAMS ((rtx
, rtx
, rtx
, int));
157 static int rs6000_adjust_priority
PARAMS ((rtx
, int));
158 static int rs6000_issue_rate
PARAMS ((void));
160 static void rs6000_init_builtins
PARAMS ((void));
161 static void altivec_init_builtins
PARAMS ((void));
162 static rtx rs6000_expand_builtin
PARAMS ((tree
, rtx
, rtx
, enum machine_mode
, int));
163 static rtx altivec_expand_builtin
PARAMS ((tree
, rtx
));
164 static rtx altivec_expand_unop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
165 static rtx altivec_expand_binop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
166 static rtx altivec_expand_abs_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
167 static rtx altivec_expand_predicate_builtin
PARAMS ((enum insn_code
, const char *, tree
, rtx
));
168 static rtx altivec_expand_ternop_builtin
PARAMS ((enum insn_code
, tree
, rtx
));
169 static rtx altivec_expand_stv_builtin
PARAMS ((enum insn_code
, tree
));
170 static void rs6000_parse_abi_options
PARAMS ((void));
171 static int first_altivec_reg_to_save
PARAMS ((void));
172 static unsigned int compute_vrsave_mask
PARAMS ((void));
173 static void is_altivec_return_reg
PARAMS ((rtx
, void *));
174 int vrsave_operation
PARAMS ((rtx
, enum machine_mode
));
175 static rtx generate_set_vrsave
PARAMS ((rtx
, rs6000_stack_t
*, int));
176 static void altivec_frame_fixup
PARAMS ((rtx
, rtx
, HOST_WIDE_INT
));
177 static int easy_vector_constant
PARAMS ((rtx
));
/* Default register names.  Order: GPRs 0-31, FPRs 0-31, mq/lr/ctr/ap,
   CR fields 0-7, xer, AltiVec registers 0-31, vrsave.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave"
};
#ifdef TARGET_REGNAMES
/* Alternate register names (%rN / %fN / %crN / %vN style), copied over
   rs6000_reg_names when -mregnames is in effect.  Layout mirrors
   rs6000_reg_names above.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
  /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6", "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
   "vrsave"
};
#endif
/* Targets without a strict-alignment mask get a zero fallback so the
   processor table below can reference it unconditionally.  */
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
228 /* Initialize the GCC target structure. */
229 #undef TARGET_ATTRIBUTE_TABLE
230 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
232 #undef TARGET_ASM_ALIGNED_DI_OP
233 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
235 /* Default unaligned ops are only provided for ELF. Find the ops needed
236 for non-ELF systems. */
237 #ifndef OBJECT_FORMAT_ELF
238 #ifdef OBJECT_FORMAT_COFF
239 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
241 #undef TARGET_ASM_UNALIGNED_HI_OP
242 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
243 #undef TARGET_ASM_UNALIGNED_SI_OP
244 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
245 #undef TARGET_ASM_UNALIGNED_DI_OP
246 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
249 #undef TARGET_ASM_UNALIGNED_HI_OP
250 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
251 #undef TARGET_ASM_UNALIGNED_SI_OP
252 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
256 /* This hook deals with fixups for relocatable code and DI-mode objects
258 #undef TARGET_ASM_INTEGER
259 #define TARGET_ASM_INTEGER rs6000_assemble_integer
261 #undef TARGET_ASM_FUNCTION_PROLOGUE
262 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
263 #undef TARGET_ASM_FUNCTION_EPILOGUE
264 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
267 #undef TARGET_SECTION_TYPE_FLAGS
268 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
271 #undef TARGET_SCHED_ISSUE_RATE
272 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
273 #undef TARGET_SCHED_ADJUST_COST
274 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
275 #undef TARGET_SCHED_ADJUST_PRIORITY
276 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
278 #undef TARGET_INIT_BUILTINS
279 #define TARGET_INIT_BUILTINS rs6000_init_builtins
281 #undef TARGET_EXPAND_BUILTIN
282 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
284 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
285 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
287 struct gcc_target targetm
= TARGET_INITIALIZER
;
289 /* Override command line options. Mostly we process the processor
290 type and sometimes adjust other TARGET_ options. */
293 rs6000_override_options (default_cpu
)
294 const char *default_cpu
;
297 struct rs6000_cpu_select
*ptr
;
299 /* Simplify the entries below by making a mask for any POWER
300 variant and any PowerPC variant. */
302 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
303 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
304 | MASK_PPC_GFXOPT | MASK_POWERPC64)
305 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
309 const char *const name
; /* Canonical processor name. */
310 const enum processor_type processor
; /* Processor type enum value. */
311 const int target_enable
; /* Target flags to enable. */
312 const int target_disable
; /* Target flags to disable. */
313 } const processor_target_table
[]
314 = {{"common", PROCESSOR_COMMON
, MASK_NEW_MNEMONICS
,
315 POWER_MASKS
| POWERPC_MASKS
},
316 {"power", PROCESSOR_POWER
,
317 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
318 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
319 {"power2", PROCESSOR_POWER
,
320 MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
,
321 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
322 {"power3", PROCESSOR_PPC630
,
323 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
324 POWER_MASKS
| MASK_PPC_GPOPT
},
325 {"powerpc", PROCESSOR_POWERPC
,
326 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
327 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
328 {"powerpc64", PROCESSOR_POWERPC64
,
329 MASK_POWERPC
| MASK_POWERPC64
| MASK_NEW_MNEMONICS
,
330 POWER_MASKS
| POWERPC_OPT_MASKS
},
331 {"rios", PROCESSOR_RIOS1
,
332 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
333 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
334 {"rios1", PROCESSOR_RIOS1
,
335 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
336 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
337 {"rsc", PROCESSOR_PPC601
,
338 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
339 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
340 {"rsc1", PROCESSOR_PPC601
,
341 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
,
342 MASK_POWER2
| POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
343 {"rios2", PROCESSOR_RIOS2
,
344 MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
| MASK_POWER2
,
345 POWERPC_MASKS
| MASK_NEW_MNEMONICS
},
346 {"rs64a", PROCESSOR_RS64A
,
347 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
348 POWER_MASKS
| POWERPC_OPT_MASKS
},
349 {"401", PROCESSOR_PPC403
,
350 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
351 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
352 {"403", PROCESSOR_PPC403
,
353 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
| MASK_STRICT_ALIGN
,
354 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
355 {"405", PROCESSOR_PPC405
,
356 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
357 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
358 {"505", PROCESSOR_MPCCORE
,
359 MASK_POWERPC
| MASK_NEW_MNEMONICS
,
360 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
361 {"601", PROCESSOR_PPC601
,
362 MASK_POWER
| MASK_POWERPC
| MASK_NEW_MNEMONICS
| MASK_MULTIPLE
| MASK_STRING
,
363 MASK_POWER2
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
364 {"602", PROCESSOR_PPC603
,
365 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
366 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
367 {"603", PROCESSOR_PPC603
,
368 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
369 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
370 {"603e", PROCESSOR_PPC603
,
371 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
372 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
373 {"ec603e", PROCESSOR_PPC603
,
374 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
375 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
376 {"604", PROCESSOR_PPC604
,
377 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
378 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
379 {"604e", PROCESSOR_PPC604e
,
380 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
381 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
382 {"620", PROCESSOR_PPC620
,
383 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
384 POWER_MASKS
| MASK_PPC_GPOPT
},
385 {"630", PROCESSOR_PPC630
,
386 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
387 POWER_MASKS
| MASK_PPC_GPOPT
},
388 {"740", PROCESSOR_PPC750
,
389 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
390 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
391 {"750", PROCESSOR_PPC750
,
392 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
393 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
394 {"7400", PROCESSOR_PPC7400
,
395 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
396 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
397 {"7450", PROCESSOR_PPC7450
,
398 MASK_POWERPC
| MASK_PPC_GFXOPT
| MASK_NEW_MNEMONICS
,
399 POWER_MASKS
| MASK_PPC_GPOPT
| MASK_POWERPC64
},
400 {"801", PROCESSOR_MPCCORE
,
401 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
402 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
403 {"821", PROCESSOR_MPCCORE
,
404 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
405 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
406 {"823", PROCESSOR_MPCCORE
,
407 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
408 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
},
409 {"860", PROCESSOR_MPCCORE
,
410 MASK_POWERPC
| MASK_SOFT_FLOAT
| MASK_NEW_MNEMONICS
,
411 POWER_MASKS
| POWERPC_OPT_MASKS
| MASK_POWERPC64
}};
413 size_t ptt_size
= sizeof (processor_target_table
) / sizeof (struct ptt
);
415 /* Save current -mmultiple/-mno-multiple status. */
416 int multiple
= TARGET_MULTIPLE
;
417 /* Save current -mstring/-mno-string status. */
418 int string
= TARGET_STRING
;
420 /* Identify the processor type. */
421 rs6000_select
[0].string
= default_cpu
;
422 rs6000_cpu
= TARGET_POWERPC64
? PROCESSOR_DEFAULT64
: PROCESSOR_DEFAULT
;
424 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
426 ptr
= &rs6000_select
[i
];
427 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
429 for (j
= 0; j
< ptt_size
; j
++)
430 if (! strcmp (ptr
->string
, processor_target_table
[j
].name
))
433 rs6000_cpu
= processor_target_table
[j
].processor
;
437 target_flags
|= processor_target_table
[j
].target_enable
;
438 target_flags
&= ~processor_target_table
[j
].target_disable
;
444 error ("bad value (%s) for %s switch", ptr
->string
, ptr
->name
);
448 /* If we are optimizing big endian systems for space, use the store
449 multiple instructions. */
450 if (BYTES_BIG_ENDIAN
&& optimize_size
)
451 target_flags
|= MASK_MULTIPLE
;
453 /* If -mmultiple or -mno-multiple was explicitly used, don't
454 override with the processor default */
455 if (TARGET_MULTIPLE_SET
)
456 target_flags
= (target_flags
& ~MASK_MULTIPLE
) | multiple
;
458 /* If -mstring or -mno-string was explicitly used, don't override
459 with the processor default. */
460 if (TARGET_STRING_SET
)
461 target_flags
= (target_flags
& ~MASK_STRING
) | string
;
463 /* Don't allow -mmultiple or -mstring on little endian systems
464 unless the cpu is a 750, because the hardware doesn't support the
465 instructions used in little endian mode, and causes an alignment
466 trap. The 750 does not cause an alignment trap (except when the
467 target is unaligned). */
469 if (! BYTES_BIG_ENDIAN
&& rs6000_cpu
!= PROCESSOR_PPC750
)
473 target_flags
&= ~MASK_MULTIPLE
;
474 if (TARGET_MULTIPLE_SET
)
475 warning ("-mmultiple is not supported on little endian systems");
480 target_flags
&= ~MASK_STRING
;
481 if (TARGET_STRING_SET
)
482 warning ("-mstring is not supported on little endian systems");
486 if (flag_pic
&& DEFAULT_ABI
== ABI_AIX
)
488 warning ("-f%s ignored (all code is position independent)",
489 (flag_pic
> 1) ? "PIC" : "pic");
493 #ifdef XCOFF_DEBUGGING_INFO
494 if (flag_function_sections
&& (write_symbols
!= NO_DEBUG
)
495 && DEFAULT_ABI
== ABI_AIX
)
497 warning ("-ffunction-sections disabled on AIX when debugging");
498 flag_function_sections
= 0;
501 if (flag_data_sections
&& (DEFAULT_ABI
== ABI_AIX
))
503 warning ("-fdata-sections not supported on AIX");
504 flag_data_sections
= 0;
508 /* Set debug flags */
509 if (rs6000_debug_name
)
511 if (! strcmp (rs6000_debug_name
, "all"))
512 rs6000_debug_stack
= rs6000_debug_arg
= 1;
513 else if (! strcmp (rs6000_debug_name
, "stack"))
514 rs6000_debug_stack
= 1;
515 else if (! strcmp (rs6000_debug_name
, "arg"))
516 rs6000_debug_arg
= 1;
518 error ("unknown -mdebug-%s switch", rs6000_debug_name
);
521 /* Set size of long double */
522 rs6000_long_double_type_size
= 64;
523 if (rs6000_long_double_size_string
)
526 int size
= strtol (rs6000_long_double_size_string
, &tail
, 10);
527 if (*tail
!= '\0' || (size
!= 64 && size
!= 128))
528 error ("Unknown switch -mlong-double-%s",
529 rs6000_long_double_size_string
);
531 rs6000_long_double_type_size
= size
;
534 /* Handle -mabi= options. */
535 rs6000_parse_abi_options ();
537 #ifdef TARGET_REGNAMES
538 /* If the user desires alternate register names, copy in the
539 alternate names now. */
541 memcpy (rs6000_reg_names
, alt_reg_names
, sizeof (rs6000_reg_names
));
544 #ifdef SUBTARGET_OVERRIDE_OPTIONS
545 SUBTARGET_OVERRIDE_OPTIONS
;
547 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
548 SUBSUBTARGET_OVERRIDE_OPTIONS
;
551 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
552 If -maix-struct-return or -msvr4-struct-return was explicitly
553 used, don't override with the ABI default. */
554 if (!(target_flags
& MASK_AIX_STRUCT_RET_SET
))
556 if (DEFAULT_ABI
== ABI_V4
&& !DRAFT_V4_STRUCT_RET
)
557 target_flags
= (target_flags
& ~MASK_AIX_STRUCT_RET
);
559 target_flags
|= MASK_AIX_STRUCT_RET
;
562 /* Register global variables with the garbage collector. */
563 rs6000_add_gc_roots ();
565 /* Allocate an alias set for register saves & restores from stack. */
566 rs6000_sr_alias_set
= new_alias_set ();
569 ASM_GENERATE_INTERNAL_LABEL (toc_label_name
, "LCTOC", 1);
571 /* We can only guarantee the availability of DI pseudo-ops when
572 assembling for 64-bit targets. */
575 targetm
.asm_out
.aligned_op
.di
= NULL
;
576 targetm
.asm_out
.unaligned_op
.di
= NULL
;
579 /* Arrange to save and restore machine status around nested functions. */
580 init_machine_status
= rs6000_init_machine_status
;
581 free_machine_status
= rs6000_free_machine_status
;
584 /* Handle -mabi= options. */
586 rs6000_parse_abi_options ()
588 if (rs6000_abi_string
== 0)
590 else if (! strcmp (rs6000_abi_string
, "altivec"))
591 rs6000_altivec_abi
= 1;
593 error ("unknown ABI specified: '%s'", rs6000_abi_string
);
597 optimization_options (level
, size
)
598 int level ATTRIBUTE_UNUSED
;
599 int size ATTRIBUTE_UNUSED
;
603 /* Do anything needed at the start of the asm file. */
606 rs6000_file_start (file
, default_cpu
)
608 const char *default_cpu
;
612 const char *start
= buffer
;
613 struct rs6000_cpu_select
*ptr
;
615 if (flag_verbose_asm
)
617 sprintf (buffer
, "\n%s rs6000/powerpc options:", ASM_COMMENT_START
);
618 rs6000_select
[0].string
= default_cpu
;
620 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
622 ptr
= &rs6000_select
[i
];
623 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
625 fprintf (file
, "%s %s%s", start
, ptr
->name
, ptr
->string
);
631 switch (rs6000_sdata
)
633 case SDATA_NONE
: fprintf (file
, "%s -msdata=none", start
); start
= ""; break;
634 case SDATA_DATA
: fprintf (file
, "%s -msdata=data", start
); start
= ""; break;
635 case SDATA_SYSV
: fprintf (file
, "%s -msdata=sysv", start
); start
= ""; break;
636 case SDATA_EABI
: fprintf (file
, "%s -msdata=eabi", start
); start
= ""; break;
639 if (rs6000_sdata
&& g_switch_value
)
641 fprintf (file
, "%s -G %d", start
, g_switch_value
);
652 /* Create a CONST_DOUBLE from a string. */
655 rs6000_float_const (string
, mode
)
657 enum machine_mode mode
;
659 REAL_VALUE_TYPE value
;
660 value
= REAL_VALUE_ATOF (string
, mode
);
661 return immed_real_const_1 (value
, mode
);
664 /* Return non-zero if this function is known to have a null epilogue. */
669 if (reload_completed
)
671 rs6000_stack_t
*info
= rs6000_stack_info ();
673 if (info
->first_gp_reg_save
== 32
674 && info
->first_fp_reg_save
== 64
675 && info
->first_altivec_reg_save
== LAST_ALTIVEC_REGNO
+ 1
678 && info
->vrsave_mask
== 0
686 /* Returns 1 always. */
689 any_operand (op
, mode
)
690 rtx op ATTRIBUTE_UNUSED
;
691 enum machine_mode mode ATTRIBUTE_UNUSED
;
696 /* Returns 1 if op is the count register. */
698 count_register_operand (op
, mode
)
700 enum machine_mode mode ATTRIBUTE_UNUSED
;
702 if (GET_CODE (op
) != REG
)
705 if (REGNO (op
) == COUNT_REGISTER_REGNUM
)
708 if (REGNO (op
) > FIRST_PSEUDO_REGISTER
)
714 /* Returns 1 if op is an altivec register. */
716 altivec_register_operand (op
, mode
)
718 enum machine_mode mode ATTRIBUTE_UNUSED
;
721 return (register_operand (op
, mode
)
722 && (GET_CODE (op
) != REG
723 || REGNO (op
) > FIRST_PSEUDO_REGISTER
724 || ALTIVEC_REGNO_P (REGNO (op
))));
728 xer_operand (op
, mode
)
730 enum machine_mode mode ATTRIBUTE_UNUSED
;
732 if (GET_CODE (op
) != REG
)
735 if (XER_REGNO_P (REGNO (op
)))
741 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
742 by such constants completes more quickly. */
745 s8bit_cint_operand (op
, mode
)
747 enum machine_mode mode ATTRIBUTE_UNUSED
;
749 return ( GET_CODE (op
) == CONST_INT
750 && (INTVAL (op
) >= -128 && INTVAL (op
) <= 127));
753 /* Return 1 if OP is a constant that can fit in a D field. */
756 short_cint_operand (op
, mode
)
758 enum machine_mode mode ATTRIBUTE_UNUSED
;
760 return (GET_CODE (op
) == CONST_INT
761 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I'));
764 /* Similar for an unsigned D field. */
767 u_short_cint_operand (op
, mode
)
769 enum machine_mode mode ATTRIBUTE_UNUSED
;
771 return (GET_CODE (op
) == CONST_INT
772 && CONST_OK_FOR_LETTER_P (INTVAL (op
) & GET_MODE_MASK (mode
), 'K'));
775 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
778 non_short_cint_operand (op
, mode
)
780 enum machine_mode mode ATTRIBUTE_UNUSED
;
782 return (GET_CODE (op
) == CONST_INT
783 && (unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x8000) >= 0x10000);
786 /* Returns 1 if OP is a CONST_INT that is a positive value
787 and an exact power of 2. */
790 exact_log2_cint_operand (op
, mode
)
792 enum machine_mode mode ATTRIBUTE_UNUSED
;
794 return (GET_CODE (op
) == CONST_INT
796 && exact_log2 (INTVAL (op
)) >= 0);
799 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
803 gpc_reg_operand (op
, mode
)
805 enum machine_mode mode
;
807 return (register_operand (op
, mode
)
808 && (GET_CODE (op
) != REG
809 || (REGNO (op
) >= ARG_POINTER_REGNUM
810 && !XER_REGNO_P (REGNO (op
)))
811 || REGNO (op
) < MQ_REGNO
));
814 /* Returns 1 if OP is either a pseudo-register or a register denoting a
818 cc_reg_operand (op
, mode
)
820 enum machine_mode mode
;
822 return (register_operand (op
, mode
)
823 && (GET_CODE (op
) != REG
824 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
825 || CR_REGNO_P (REGNO (op
))));
828 /* Returns 1 if OP is either a pseudo-register or a register denoting a
829 CR field that isn't CR0. */
832 cc_reg_not_cr0_operand (op
, mode
)
834 enum machine_mode mode
;
836 return (register_operand (op
, mode
)
837 && (GET_CODE (op
) != REG
838 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
839 || CR_REGNO_NOT_CR0_P (REGNO (op
))));
842 /* Returns 1 if OP is either a constant integer valid for a D-field or
843 a non-special register. If a register, it must be in the proper
844 mode unless MODE is VOIDmode. */
847 reg_or_short_operand (op
, mode
)
849 enum machine_mode mode
;
851 return short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
854 /* Similar, except check if the negation of the constant would be
855 valid for a D-field. */
858 reg_or_neg_short_operand (op
, mode
)
860 enum machine_mode mode
;
862 if (GET_CODE (op
) == CONST_INT
)
863 return CONST_OK_FOR_LETTER_P (INTVAL (op
), 'P');
865 return gpc_reg_operand (op
, mode
);
868 /* Returns 1 if OP is either a constant integer valid for a DS-field or
869 a non-special register. If a register, it must be in the proper
870 mode unless MODE is VOIDmode. */
873 reg_or_aligned_short_operand (op
, mode
)
875 enum machine_mode mode
;
877 if (gpc_reg_operand (op
, mode
))
879 else if (short_cint_operand (op
, mode
) && !(INTVAL (op
) & 3))
886 /* Return 1 if the operand is either a register or an integer whose
887 high-order 16 bits are zero. */
890 reg_or_u_short_operand (op
, mode
)
892 enum machine_mode mode
;
894 return u_short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
897 /* Return 1 is the operand is either a non-special register or ANY
901 reg_or_cint_operand (op
, mode
)
903 enum machine_mode mode
;
905 return (GET_CODE (op
) == CONST_INT
|| gpc_reg_operand (op
, mode
));
908 /* Return 1 is the operand is either a non-special register or ANY
909 32-bit signed constant integer. */
912 reg_or_arith_cint_operand (op
, mode
)
914 enum machine_mode mode
;
916 return (gpc_reg_operand (op
, mode
)
917 || (GET_CODE (op
) == CONST_INT
918 #if HOST_BITS_PER_WIDE_INT != 32
919 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80000000)
920 < (unsigned HOST_WIDE_INT
) 0x100000000ll
)
925 /* Return 1 is the operand is either a non-special register or a 32-bit
926 signed constant integer valid for 64-bit addition. */
929 reg_or_add_cint64_operand (op
, mode
)
931 enum machine_mode mode
;
933 return (gpc_reg_operand (op
, mode
)
934 || (GET_CODE (op
) == CONST_INT
935 && INTVAL (op
) < 0x7fff8000
936 #if HOST_BITS_PER_WIDE_INT != 32
937 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80008000)
943 /* Return 1 is the operand is either a non-special register or a 32-bit
944 signed constant integer valid for 64-bit subtraction. */
947 reg_or_sub_cint64_operand (op
, mode
)
949 enum machine_mode mode
;
951 return (gpc_reg_operand (op
, mode
)
952 || (GET_CODE (op
) == CONST_INT
953 && (- INTVAL (op
)) < 0x7fff8000
954 #if HOST_BITS_PER_WIDE_INT != 32
955 && ((unsigned HOST_WIDE_INT
) ((- INTVAL (op
)) + 0x80008000)
961 /* Return 1 is the operand is either a non-special register or ANY
962 32-bit unsigned constant integer. */
965 reg_or_logical_cint_operand (op
, mode
)
967 enum machine_mode mode
;
969 if (GET_CODE (op
) == CONST_INT
)
971 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
)
973 if (GET_MODE_BITSIZE (mode
) <= 32)
980 return ((INTVAL (op
) & GET_MODE_MASK (mode
)
981 & (~ (unsigned HOST_WIDE_INT
) 0xffffffff)) == 0);
983 else if (GET_CODE (op
) == CONST_DOUBLE
)
985 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
989 return CONST_DOUBLE_HIGH (op
) == 0;
992 return gpc_reg_operand (op
, mode
);
995 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
998 got_operand (op
, mode
)
1000 enum machine_mode mode ATTRIBUTE_UNUSED
;
1002 return (GET_CODE (op
) == SYMBOL_REF
1003 || GET_CODE (op
) == CONST
1004 || GET_CODE (op
) == LABEL_REF
);
1007 /* Return 1 if the operand is a simple references that can be loaded via
1008 the GOT (labels involving addition aren't allowed). */
1011 got_no_const_operand (op
, mode
)
1013 enum machine_mode mode ATTRIBUTE_UNUSED
;
1015 return (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
);
1018 /* Return the number of instructions it takes to form a constant in an
1019 integer register. */
1022 num_insns_constant_wide (value
)
1023 HOST_WIDE_INT value
;
1025 /* signed constant loadable with {cal|addi} */
1026 if (CONST_OK_FOR_LETTER_P (value
, 'I'))
1029 /* constant loadable with {cau|addis} */
1030 else if (CONST_OK_FOR_LETTER_P (value
, 'L'))
1033 #if HOST_BITS_PER_WIDE_INT == 64
1034 else if (TARGET_POWERPC64
)
1036 HOST_WIDE_INT low
= value
& 0xffffffff;
1037 HOST_WIDE_INT high
= value
>> 32;
1039 low
= (low
^ 0x80000000) - 0x80000000; /* sign extend */
1041 if (high
== 0 && (low
& 0x80000000) == 0)
1044 else if (high
== -1 && (low
& 0x80000000) != 0)
1048 return num_insns_constant_wide (high
) + 1;
1051 return (num_insns_constant_wide (high
)
1052 + num_insns_constant_wide (low
) + 1);
1061 num_insns_constant (op
, mode
)
1063 enum machine_mode mode
;
1065 if (GET_CODE (op
) == CONST_INT
)
1067 #if HOST_BITS_PER_WIDE_INT == 64
1068 if ((INTVAL (op
) >> 31) != 0 && (INTVAL (op
) >> 31) != -1
1069 && mask64_operand (op
, mode
))
1073 return num_insns_constant_wide (INTVAL (op
));
1076 else if (GET_CODE (op
) == CONST_DOUBLE
&& mode
== SFmode
)
1081 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1082 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1083 return num_insns_constant_wide ((HOST_WIDE_INT
) l
);
1086 else if (GET_CODE (op
) == CONST_DOUBLE
)
1092 int endian
= (WORDS_BIG_ENDIAN
== 0);
1094 if (mode
== VOIDmode
|| mode
== DImode
)
1096 high
= CONST_DOUBLE_HIGH (op
);
1097 low
= CONST_DOUBLE_LOW (op
);
1101 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1102 REAL_VALUE_TO_TARGET_DOUBLE (rv
, l
);
1104 low
= l
[1 - endian
];
1108 return (num_insns_constant_wide (low
)
1109 + num_insns_constant_wide (high
));
1113 if (high
== 0 && low
>= 0)
1114 return num_insns_constant_wide (low
);
1116 else if (high
== -1 && low
< 0)
1117 return num_insns_constant_wide (low
);
1119 else if (mask64_operand (op
, mode
))
1123 return num_insns_constant_wide (high
) + 1;
1126 return (num_insns_constant_wide (high
)
1127 + num_insns_constant_wide (low
) + 1);
1135 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1136 register with one instruction per word. We only do this if we can
1137 safely read CONST_DOUBLE_{LOW,HIGH}. */
1140 easy_fp_constant (op
, mode
)
1142 enum machine_mode mode
;
1144 if (GET_CODE (op
) != CONST_DOUBLE
1145 || GET_MODE (op
) != mode
1146 || (GET_MODE_CLASS (mode
) != MODE_FLOAT
&& mode
!= DImode
))
1149 /* Consider all constants with -msoft-float to be easy. */
1150 if (TARGET_SOFT_FLOAT
&& mode
!= DImode
)
1153 /* If we are using V.4 style PIC, consider all constants to be hard. */
1154 if (flag_pic
&& DEFAULT_ABI
== ABI_V4
)
1157 #ifdef TARGET_RELOCATABLE
1158 /* Similarly if we are using -mrelocatable, consider all constants
1160 if (TARGET_RELOCATABLE
)
1169 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1170 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
1172 return (num_insns_constant_wide ((HOST_WIDE_INT
)k
[0]) == 1
1173 && num_insns_constant_wide ((HOST_WIDE_INT
)k
[1]) == 1);
1176 else if (mode
== SFmode
)
1181 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1182 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1184 return num_insns_constant_wide (l
) == 1;
1187 else if (mode
== DImode
)
1188 return ((TARGET_POWERPC64
1189 && GET_CODE (op
) == CONST_DOUBLE
&& CONST_DOUBLE_LOW (op
) == 0)
1190 || (num_insns_constant (op
, DImode
) <= 2));
1192 else if (mode
== SImode
)
1198 /* Return 1 if the operand is a CONST_INT and can be put into a
1199 register with one instruction. */
1202 easy_vector_constant (op
)
1208 if (GET_CODE (op
) != CONST_VECTOR
)
1211 units
= CONST_VECTOR_NUNITS (op
);
1213 /* We can generate 0 easily. Look for that. */
1214 for (i
= 0; i
< units
; ++i
)
1216 elt
= CONST_VECTOR_ELT (op
, i
);
1218 /* We could probably simplify this by just checking for equality
1219 with CONST0_RTX for the current mode, but let's be safe
1222 if (GET_CODE (elt
) == CONST_INT
&& INTVAL (elt
) != 0)
1225 if (GET_CODE (elt
) == CONST_DOUBLE
1226 && (CONST_DOUBLE_LOW (elt
) != 0
1227 || CONST_DOUBLE_HIGH (elt
) != 0))
1231 /* We could probably generate a few other constants trivially, but
1232 gcc doesn't generate them yet. FIXME later. */
1236 /* Return 1 if the operand is the constant 0. This works for scalars
1237 as well as vectors. */
1239 zero_constant (op
, mode
)
1241 enum machine_mode mode
;
1243 return op
== CONST0_RTX (mode
);
1246 /* Return 1 if the operand is 0.0. */
1248 zero_fp_constant (op
, mode
)
1250 enum machine_mode mode
;
1252 return GET_MODE_CLASS (mode
) == MODE_FLOAT
&& op
== CONST0_RTX (mode
);
1255 /* Return 1 if the operand is in volatile memory. Note that during
1256 the RTL generation phase, memory_operand does not return TRUE for
1257 volatile memory references. So this function allows us to
1258 recognize volatile references where its safe. */
1261 volatile_mem_operand (op
, mode
)
1263 enum machine_mode mode
;
1265 if (GET_CODE (op
) != MEM
)
1268 if (!MEM_VOLATILE_P (op
))
1271 if (mode
!= GET_MODE (op
))
1274 if (reload_completed
)
1275 return memory_operand (op
, mode
);
1277 if (reload_in_progress
)
1278 return strict_memory_address_p (mode
, XEXP (op
, 0));
1280 return memory_address_p (mode
, XEXP (op
, 0));
1283 /* Return 1 if the operand is an offsettable memory operand. */
1286 offsettable_mem_operand (op
, mode
)
1288 enum machine_mode mode
;
1290 return ((GET_CODE (op
) == MEM
)
1291 && offsettable_address_p (reload_completed
|| reload_in_progress
,
1292 mode
, XEXP (op
, 0)));
1295 /* Return 1 if the operand is either an easy FP constant (see above) or
1299 mem_or_easy_const_operand (op
, mode
)
1301 enum machine_mode mode
;
1303 return memory_operand (op
, mode
) || easy_fp_constant (op
, mode
);
1306 /* Return 1 if the operand is either a non-special register or an item
1307 that can be used as the operand of a `mode' add insn. */
1310 add_operand (op
, mode
)
1312 enum machine_mode mode
;
1314 if (GET_CODE (op
) == CONST_INT
)
1315 return (CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1316 || CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1318 return gpc_reg_operand (op
, mode
);
1321 /* Return 1 if OP is a constant but not a valid add_operand. */
1324 non_add_cint_operand (op
, mode
)
1326 enum machine_mode mode ATTRIBUTE_UNUSED
;
1328 return (GET_CODE (op
) == CONST_INT
1329 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
1330 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
1333 /* Return 1 if the operand is a non-special register or a constant that
1334 can be used as the operand of an OR or XOR insn on the RS/6000. */
1337 logical_operand (op
, mode
)
1339 enum machine_mode mode
;
1341 HOST_WIDE_INT opl
, oph
;
1343 if (gpc_reg_operand (op
, mode
))
1346 if (GET_CODE (op
) == CONST_INT
)
1348 opl
= INTVAL (op
) & GET_MODE_MASK (mode
);
1350 #if HOST_BITS_PER_WIDE_INT <= 32
1351 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
&& opl
< 0)
1355 else if (GET_CODE (op
) == CONST_DOUBLE
)
1357 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
1360 opl
= CONST_DOUBLE_LOW (op
);
1361 oph
= CONST_DOUBLE_HIGH (op
);
1368 return ((opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff) == 0
1369 || (opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff0000) == 0);
1372 /* Return 1 if C is a constant that is not a logical operand (as
1373 above), but could be split into one. */
1376 non_logical_cint_operand (op
, mode
)
1378 enum machine_mode mode
;
1380 return ((GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
1381 && ! logical_operand (op
, mode
)
1382 && reg_or_logical_cint_operand (op
, mode
));
1385 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1386 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1387 Reject all ones and all zeros, since these should have been optimized
1388 away and confuse the making of MB and ME. */
1391 mask_operand (op
, mode
)
1393 enum machine_mode mode ATTRIBUTE_UNUSED
;
1395 HOST_WIDE_INT c
, lsb
;
1397 if (GET_CODE (op
) != CONST_INT
)
1402 /* Fail in 64-bit mode if the mask wraps around because the upper
1403 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1404 if (TARGET_POWERPC64
&& (c
& 0x80000001) == 0x80000001)
1407 /* We don't change the number of transitions by inverting,
1408 so make sure we start with the LS bit zero. */
1412 /* Reject all zeros or all ones. */
1416 /* Find the first transition. */
1419 /* Invert to look for a second transition. */
1422 /* Erase first transition. */
1425 /* Find the second transition (if any). */
1428 /* Match if all the bits above are 1's (or c is zero). */
1432 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1433 It is if there are no more than one 1->0 or 0->1 transitions.
1434 Reject all ones and all zeros, since these should have been optimized
1435 away and confuse the making of MB and ME. */
1438 mask64_operand (op
, mode
)
1440 enum machine_mode mode
;
1442 if (GET_CODE (op
) == CONST_INT
)
1444 HOST_WIDE_INT c
, lsb
;
1446 /* We don't change the number of transitions by inverting,
1447 so make sure we start with the LS bit zero. */
1452 /* Reject all zeros or all ones. */
1456 /* Find the transition, and check that all bits above are 1's. */
1460 else if (GET_CODE (op
) == CONST_DOUBLE
1461 && (mode
== VOIDmode
|| mode
== DImode
))
1463 HOST_WIDE_INT low
, high
, lsb
;
1465 if (HOST_BITS_PER_WIDE_INT
< 64)
1466 high
= CONST_DOUBLE_HIGH (op
);
1468 low
= CONST_DOUBLE_LOW (op
);
1471 if (HOST_BITS_PER_WIDE_INT
< 64)
1478 if (HOST_BITS_PER_WIDE_INT
>= 64 || high
== 0)
1482 return high
== -lsb
;
1486 return low
== -lsb
&& (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0);
1492 /* Return 1 if the operand is either a non-special register or a constant
1493 that can be used as the operand of a PowerPC64 logical AND insn. */
1496 and64_operand (op
, mode
)
1498 enum machine_mode mode
;
1500 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1501 return (gpc_reg_operand (op
, mode
) || mask64_operand (op
, mode
));
1503 return (logical_operand (op
, mode
) || mask64_operand (op
, mode
));
1506 /* Return 1 if the operand is either a non-special register or a
1507 constant that can be used as the operand of an RS/6000 logical AND insn. */
1510 and_operand (op
, mode
)
1512 enum machine_mode mode
;
1514 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
1515 return (gpc_reg_operand (op
, mode
) || mask_operand (op
, mode
));
1517 return (logical_operand (op
, mode
) || mask_operand (op
, mode
));
1520 /* Return 1 if the operand is a general register or memory operand. */
1523 reg_or_mem_operand (op
, mode
)
1525 enum machine_mode mode
;
1527 return (gpc_reg_operand (op
, mode
)
1528 || memory_operand (op
, mode
)
1529 || volatile_mem_operand (op
, mode
));
1532 /* Return 1 if the operand is a general register or memory operand without
1533 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1537 lwa_operand (op
, mode
)
1539 enum machine_mode mode
;
1543 if (reload_completed
&& GET_CODE (inner
) == SUBREG
)
1544 inner
= SUBREG_REG (inner
);
1546 return gpc_reg_operand (inner
, mode
)
1547 || (memory_operand (inner
, mode
)
1548 && GET_CODE (XEXP (inner
, 0)) != PRE_INC
1549 && GET_CODE (XEXP (inner
, 0)) != PRE_DEC
1550 && (GET_CODE (XEXP (inner
, 0)) != PLUS
1551 || GET_CODE (XEXP (XEXP (inner
, 0), 1)) != CONST_INT
1552 || INTVAL (XEXP (XEXP (inner
, 0), 1)) % 4 == 0));
1555 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1556 to CALL. This is a SYMBOL_REF or a pseudo-register, which will be
1560 call_operand (op
, mode
)
1562 enum machine_mode mode
;
1564 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1567 return (GET_CODE (op
) == SYMBOL_REF
1568 || (GET_CODE (op
) == REG
&& REGNO (op
) >= FIRST_PSEUDO_REGISTER
));
1571 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1572 this file and the function is not weakly defined. */
1575 current_file_function_operand (op
, mode
)
1577 enum machine_mode mode ATTRIBUTE_UNUSED
;
1579 return (GET_CODE (op
) == SYMBOL_REF
1580 && (SYMBOL_REF_FLAG (op
)
1581 || (op
== XEXP (DECL_RTL (current_function_decl
), 0)
1582 && ! DECL_WEAK (current_function_decl
))));
1585 /* Return 1 if this operand is a valid input for a move insn. */
1588 input_operand (op
, mode
)
1590 enum machine_mode mode
;
1592 /* Memory is always valid. */
1593 if (memory_operand (op
, mode
))
1596 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1597 if (GET_CODE (op
) == CONSTANT_P_RTX
)
1600 /* For floating-point, easy constants are valid. */
1601 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
1603 && easy_fp_constant (op
, mode
))
1606 /* Allow any integer constant. */
1607 if (GET_MODE_CLASS (mode
) == MODE_INT
1608 && (GET_CODE (op
) == CONST_INT
1609 || GET_CODE (op
) == CONST_DOUBLE
))
1612 /* For floating-point or multi-word mode, the only remaining valid type
1614 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
1615 || GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
1616 return register_operand (op
, mode
);
1618 /* The only cases left are integral modes one word or smaller (we
1619 do not get called for MODE_CC values). These can be in any
1621 if (register_operand (op
, mode
))
1624 /* A SYMBOL_REF referring to the TOC is valid. */
1625 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op
))
1628 /* A constant pool expression (relative to the TOC) is valid */
1629 if (TOC_RELATIVE_EXPR_P (op
))
1632 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1634 if (DEFAULT_ABI
== ABI_V4
1635 && (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == CONST
)
1636 && small_data_operand (op
, Pmode
))
1642 /* Return 1 for an operand in small memory on V.4/eabi. */
1645 small_data_operand (op
, mode
)
1646 rtx op ATTRIBUTE_UNUSED
;
1647 enum machine_mode mode ATTRIBUTE_UNUSED
;
1652 if (rs6000_sdata
== SDATA_NONE
|| rs6000_sdata
== SDATA_DATA
)
1655 if (DEFAULT_ABI
!= ABI_V4
)
1658 if (GET_CODE (op
) == SYMBOL_REF
)
1661 else if (GET_CODE (op
) != CONST
1662 || GET_CODE (XEXP (op
, 0)) != PLUS
1663 || GET_CODE (XEXP (XEXP (op
, 0), 0)) != SYMBOL_REF
1664 || GET_CODE (XEXP (XEXP (op
, 0), 1)) != CONST_INT
)
1669 rtx sum
= XEXP (op
, 0);
1670 HOST_WIDE_INT summand
;
1672 /* We have to be careful here, because it is the referenced address
1673 that must be 32k from _SDA_BASE_, not just the symbol. */
1674 summand
= INTVAL (XEXP (sum
, 1));
1675 if (summand
< 0 || summand
> g_switch_value
)
1678 sym_ref
= XEXP (sum
, 0);
1681 if (*XSTR (sym_ref
, 0) != '@')
1692 constant_pool_expr_1 (op
, have_sym
, have_toc
)
1697 switch (GET_CODE(op
))
1700 if (CONSTANT_POOL_ADDRESS_P (op
))
1702 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op
), Pmode
))
1710 else if (! strcmp (XSTR (op
, 0), toc_label_name
))
1719 return (constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
)
1720 && constant_pool_expr_1 (XEXP (op
, 1), have_sym
, have_toc
));
1722 return constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
);
1731 constant_pool_expr_p (op
)
1736 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_sym
;
1740 toc_relative_expr_p (op
)
1745 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_toc
;
1748 /* Try machine-dependent ways of modifying an illegitimate address
1749 to be legitimate. If we find one, return the new, valid address.
1750 This is used from only one place: `memory_address' in explow.c.
1752 OLDX is the address as it was before break_out_memory_refs was
1753 called. In some cases it is useful to look at this to decide what
1756 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
1758 It is always safe for this function to do nothing. It exists to
1759 recognize opportunities to optimize the output.
1761 On RS/6000, first check for the sum of a register with a constant
1762 integer that is out of range. If so, generate code to add the
1763 constant with the low-order 16 bits masked to the register and force
1764 this result into another register (this can be done with `cau').
1765 Then generate an address of REG+(CONST&0xffff), allowing for the
1766 possibility of bit 16 being a one.
1768 Then check for the sum of a register and something not constant, try to
1769 load the other things into a register and return the sum. */
1771 rs6000_legitimize_address (x
, oldx
, mode
)
1773 rtx oldx ATTRIBUTE_UNUSED
;
1774 enum machine_mode mode
;
1776 if (GET_CODE (x
) == PLUS
1777 && GET_CODE (XEXP (x
, 0)) == REG
1778 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1779 && (unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1)) + 0x8000) >= 0x10000)
1781 HOST_WIDE_INT high_int
, low_int
;
1783 high_int
= INTVAL (XEXP (x
, 1)) & (~ (HOST_WIDE_INT
) 0xffff);
1784 low_int
= INTVAL (XEXP (x
, 1)) & 0xffff;
1785 if (low_int
& 0x8000)
1786 high_int
+= 0x10000, low_int
|= ((HOST_WIDE_INT
) -1) << 16;
1787 sum
= force_operand (gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
1788 GEN_INT (high_int
)), 0);
1789 return gen_rtx_PLUS (Pmode
, sum
, GEN_INT (low_int
));
1791 else if (GET_CODE (x
) == PLUS
1792 && GET_CODE (XEXP (x
, 0)) == REG
1793 && GET_CODE (XEXP (x
, 1)) != CONST_INT
1794 && GET_MODE_NUNITS (mode
) == 1
1795 && (TARGET_HARD_FLOAT
|| TARGET_POWERPC64
|| mode
!= DFmode
)
1796 && (TARGET_POWERPC64
|| mode
!= DImode
)
1799 return gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
1800 force_reg (Pmode
, force_operand (XEXP (x
, 1), 0)));
1802 else if (ALTIVEC_VECTOR_MODE (mode
))
1806 /* Make sure both operands are registers. */
1807 if (GET_CODE (x
) == PLUS
)
1808 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
1809 force_reg (Pmode
, XEXP (x
, 1)));
1811 reg
= force_reg (Pmode
, x
);
1814 else if (TARGET_ELF
&& TARGET_32BIT
&& TARGET_NO_TOC
&& ! flag_pic
1815 && GET_CODE (x
) != CONST_INT
1816 && GET_CODE (x
) != CONST_DOUBLE
1818 && GET_MODE_NUNITS (mode
) == 1
1819 && (GET_MODE_BITSIZE (mode
) <= 32
1820 || (TARGET_HARD_FLOAT
&& mode
== DFmode
)))
1822 rtx reg
= gen_reg_rtx (Pmode
);
1823 emit_insn (gen_elf_high (reg
, (x
)));
1824 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
1826 else if (TARGET_MACHO
&& TARGET_32BIT
&& TARGET_NO_TOC
1828 && GET_CODE (x
) != CONST_INT
1829 && GET_CODE (x
) != CONST_DOUBLE
1831 && (TARGET_HARD_FLOAT
|| mode
!= DFmode
)
1835 rtx reg
= gen_reg_rtx (Pmode
);
1836 emit_insn (gen_macho_high (reg
, (x
)));
1837 return gen_rtx_LO_SUM (Pmode
, reg
, (x
));
1840 && CONSTANT_POOL_EXPR_P (x
)
1841 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), Pmode
))
1843 return create_TOC_reference (x
);
/* The convention appears to be to define this wherever it is used.
   With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
   is now used here.  */
#ifndef REG_MODE_OK_FOR_BASE_P
#define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
#endif
1856 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
1857 replace the input X, or the original X if no replacement is called for.
1858 The output parameter *WIN is 1 if the calling macro should goto WIN,
1861 For RS/6000, we wish to handle large displacements off a base
1862 register by splitting the addend across an addiu/addis and the mem insn.
1863 This cuts number of extra insns needed from 3 to 1.
1865 On Darwin, we use this to generate code for floating point constants.
1866 A movsf_low is generated so we wind up with 2 instructions rather than 3.
1867 The Darwin code is inside #if TARGET_MACHO because only then is
1868 machopic_function_base_name() defined. */
1870 rs6000_legitimize_reload_address (x
, mode
, opnum
, type
, ind_levels
, win
)
1872 enum machine_mode mode
;
1875 int ind_levels ATTRIBUTE_UNUSED
;
1878 /* We must recognize output that we have already generated ourselves. */
1879 if (GET_CODE (x
) == PLUS
1880 && GET_CODE (XEXP (x
, 0)) == PLUS
1881 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
1882 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
1883 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
1885 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
1886 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
1887 opnum
, (enum reload_type
)type
);
1893 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
1894 && GET_CODE (x
) == LO_SUM
1895 && GET_CODE (XEXP (x
, 0)) == PLUS
1896 && XEXP (XEXP (x
, 0), 0) == pic_offset_table_rtx
1897 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == HIGH
1898 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 0)) == CONST
1899 && XEXP (XEXP (XEXP (x
, 0), 1), 0) == XEXP (x
, 1)
1900 && GET_CODE (XEXP (XEXP (x
, 1), 0)) == MINUS
1901 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == SYMBOL_REF
1902 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == SYMBOL_REF
)
1904 /* Result of previous invocation of this function on Darwin
1905 floating point constant. */
1906 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
1907 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
1908 opnum
, (enum reload_type
)type
);
1913 if (GET_CODE (x
) == PLUS
1914 && GET_CODE (XEXP (x
, 0)) == REG
1915 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
1916 && REG_MODE_OK_FOR_BASE_P (XEXP (x
, 0), mode
)
1917 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1918 && !ALTIVEC_VECTOR_MODE (mode
))
1920 HOST_WIDE_INT val
= INTVAL (XEXP (x
, 1));
1921 HOST_WIDE_INT low
= ((val
& 0xffff) ^ 0x8000) - 0x8000;
1923 = (((val
- low
) & 0xffffffff) ^ 0x80000000) - 0x80000000;
1925 /* Check for 32-bit overflow. */
1926 if (high
+ low
!= val
)
1932 /* Reload the high part into a base reg; leave the low part
1933 in the mem directly. */
1935 x
= gen_rtx_PLUS (GET_MODE (x
),
1936 gen_rtx_PLUS (GET_MODE (x
), XEXP (x
, 0),
1940 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
1941 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
1942 opnum
, (enum reload_type
)type
);
1947 if (GET_CODE (x
) == SYMBOL_REF
1948 && DEFAULT_ABI
== ABI_DARWIN
1949 && !ALTIVEC_VECTOR_MODE (mode
)
1952 /* Darwin load of floating point constant. */
1953 rtx offset
= gen_rtx (CONST
, Pmode
,
1954 gen_rtx (MINUS
, Pmode
, x
,
1955 gen_rtx (SYMBOL_REF
, Pmode
,
1956 machopic_function_base_name ())));
1957 x
= gen_rtx (LO_SUM
, GET_MODE (x
),
1958 gen_rtx (PLUS
, Pmode
, pic_offset_table_rtx
,
1959 gen_rtx (HIGH
, Pmode
, offset
)), offset
);
1960 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
1961 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
1962 opnum
, (enum reload_type
)type
);
1968 && CONSTANT_POOL_EXPR_P (x
)
1969 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), mode
))
1971 (x
) = create_TOC_reference (x
);
1979 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
1980 that is a valid memory address for an instruction.
1981 The MODE argument is the machine mode for the MEM expression
1982 that wants to use this address.
1984 On the RS/6000, there are four valid address: a SYMBOL_REF that
1985 refers to a constant pool entry of an address (or the sum of it
1986 plus a constant), a short (16-bit signed) constant plus a register,
1987 the sum of two registers, or a register indirect, possibly with an
1988 auto-increment. For DFmode and DImode with an constant plus register,
1989 we must ensure that both words are addressable or PowerPC64 with offset
1992 For modes spanning multiple registers (DFmode in 32-bit GPRs,
1993 32-bit DImode, TImode), indexed addressing cannot be used because
1994 adjacent memory cells are accessed by adding word-sized offsets
1995 during assembly output. */
1997 rs6000_legitimate_address (mode
, x
, reg_ok_strict
)
1998 enum machine_mode mode
;
2002 if (LEGITIMATE_INDIRECT_ADDRESS_P (x
, reg_ok_strict
))
2004 if ((GET_CODE (x
) == PRE_INC
|| GET_CODE (x
) == PRE_DEC
)
2006 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x
, 0), reg_ok_strict
))
2008 if (LEGITIMATE_SMALL_DATA_P (mode
, x
))
2010 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x
))
2012 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2014 && GET_CODE (x
) == PLUS
2015 && GET_CODE (XEXP (x
, 0)) == REG
2016 && XEXP (x
, 0) == virtual_stack_vars_rtx
2017 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2019 if (LEGITIMATE_OFFSET_ADDRESS_P (mode
, x
, reg_ok_strict
))
2022 && (TARGET_HARD_FLOAT
|| TARGET_POWERPC64
|| mode
!= DFmode
)
2023 && (TARGET_POWERPC64
|| mode
!= DImode
)
2024 && LEGITIMATE_INDEXED_ADDRESS_P (x
, reg_ok_strict
))
2026 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode
, x
, reg_ok_strict
))
2031 /* Try to output insns to set TARGET equal to the constant C if it can
2032 be done in less than N insns. Do all computations in MODE.
2033 Returns the place where the output has been placed if it can be
2034 done and the insns have been emitted. If it would take more than N
2035 insns, zero is returned and no insns and emitted. */
2038 rs6000_emit_set_const (dest
, mode
, source
, n
)
2040 enum machine_mode mode
;
2041 int n ATTRIBUTE_UNUSED
;
2043 HOST_WIDE_INT c0
, c1
;
2045 if (mode
== QImode
|| mode
== HImode
|| mode
== SImode
)
2048 dest
= gen_reg_rtx (mode
);
2049 emit_insn (gen_rtx_SET (VOIDmode
, dest
, source
));
2053 if (GET_CODE (source
) == CONST_INT
)
2055 c0
= INTVAL (source
);
2058 else if (GET_CODE (source
) == CONST_DOUBLE
)
2060 #if HOST_BITS_PER_WIDE_INT >= 64
2061 c0
= CONST_DOUBLE_LOW (source
);
2064 c0
= CONST_DOUBLE_LOW (source
);
2065 c1
= CONST_DOUBLE_HIGH (source
);
2071 return rs6000_emit_set_long_const (dest
, c0
, c1
);
2074 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2075 fall back to a straight forward decomposition. We do this to avoid
2076 exponential run times encountered when looking for longer sequences
2077 with rs6000_emit_set_const. */
2079 rs6000_emit_set_long_const (dest
, c1
, c2
)
2081 HOST_WIDE_INT c1
, c2
;
2083 if (!TARGET_POWERPC64
)
2085 rtx operand1
, operand2
;
2087 operand1
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
== 0,
2089 operand2
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
!= 0,
2091 emit_move_insn (operand1
, GEN_INT (c1
));
2092 emit_move_insn (operand2
, GEN_INT (c2
));
2096 HOST_WIDE_INT ud1
, ud2
, ud3
, ud4
;
2099 ud2
= (c1
& 0xffff0000) >> 16;
2100 #if HOST_BITS_PER_WIDE_INT >= 64
2104 ud4
= (c2
& 0xffff0000) >> 16;
2106 if ((ud4
== 0xffff && ud3
== 0xffff && ud2
== 0xffff && (ud1
& 0x8000))
2107 || (ud4
== 0 && ud3
== 0 && ud2
== 0 && ! (ud1
& 0x8000)))
2110 emit_move_insn (dest
, GEN_INT (((ud1
^ 0x8000) - 0x8000)));
2112 emit_move_insn (dest
, GEN_INT (ud1
));
2115 else if ((ud4
== 0xffff && ud3
== 0xffff && (ud2
& 0x8000))
2116 || (ud4
== 0 && ud3
== 0 && ! (ud2
& 0x8000)))
2119 emit_move_insn (dest
, GEN_INT (((ud2
<< 16) ^ 0x80000000)
2122 emit_move_insn (dest
, GEN_INT (ud2
<< 16));
2124 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2126 else if ((ud4
== 0xffff && (ud3
& 0x8000))
2127 || (ud4
== 0 && ! (ud3
& 0x8000)))
2130 emit_move_insn (dest
, GEN_INT (((ud3
<< 16) ^ 0x80000000)
2133 emit_move_insn (dest
, GEN_INT (ud3
<< 16));
2136 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud2
)));
2137 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (16)));
2139 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2144 emit_move_insn (dest
, GEN_INT (((ud4
<< 16) ^ 0x80000000)
2147 emit_move_insn (dest
, GEN_INT (ud4
<< 16));
2150 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud3
)));
2152 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (32)));
2154 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
,
2155 GEN_INT (ud2
<< 16)));
2157 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
2163 /* Emit a move from SOURCE to DEST in mode MODE. */
2165 rs6000_emit_move (dest
, source
, mode
)
2168 enum machine_mode mode
;
2172 operands
[1] = source
;
2174 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2175 if (GET_CODE (operands
[1]) == CONST_DOUBLE
2176 && ! FLOAT_MODE_P (mode
)
2177 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
2179 /* FIXME. This should never happen. */
2180 /* Since it seems that it does, do the safe thing and convert
2183 GEN_INT (trunc_int_for_mode (CONST_DOUBLE_LOW (operands
[1]), mode
));
2185 if (GET_CODE (operands
[1]) == CONST_DOUBLE
2186 && ! FLOAT_MODE_P (mode
)
2187 && ((CONST_DOUBLE_HIGH (operands
[1]) == 0
2188 && CONST_DOUBLE_LOW (operands
[1]) >= 0)
2189 || (CONST_DOUBLE_HIGH (operands
[1]) == -1
2190 && CONST_DOUBLE_LOW (operands
[1]) < 0)))
2193 /* Check if GCC is setting up a block move that will end up using FP
2194 registers as temporaries. We must make sure this is acceptable. */
2195 if (GET_CODE (operands
[0]) == MEM
2196 && GET_CODE (operands
[1]) == MEM
2198 && (SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[0]))
2199 || SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[1])))
2200 && ! (SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[0]) > 32
2201 ? 32 : MEM_ALIGN (operands
[0])))
2202 || SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[1]) > 32
2204 : MEM_ALIGN (operands
[1]))))
2205 && ! MEM_VOLATILE_P (operands
[0])
2206 && ! MEM_VOLATILE_P (operands
[1]))
2208 emit_move_insn (adjust_address (operands
[0], SImode
, 0),
2209 adjust_address (operands
[1], SImode
, 0));
2210 emit_move_insn (adjust_address (operands
[0], SImode
, 4),
2211 adjust_address (operands
[1], SImode
, 4));
2215 if (! no_new_pseudos
&& GET_CODE (operands
[0]) != REG
)
2216 operands
[1] = force_reg (mode
, operands
[1]);
2218 if (mode
== SFmode
&& ! TARGET_POWERPC
&& TARGET_HARD_FLOAT
2219 && GET_CODE (operands
[0]) == MEM
)
2223 if (reload_in_progress
|| reload_completed
)
2224 regnum
= true_regnum (operands
[1]);
2225 else if (GET_CODE (operands
[1]) == REG
)
2226 regnum
= REGNO (operands
[1]);
2230 /* If operands[1] is a register, on POWER it may have
2231 double-precision data in it, so truncate it to single
2233 if (FP_REGNO_P (regnum
) || regnum
>= FIRST_PSEUDO_REGISTER
)
2236 newreg
= (no_new_pseudos
? operands
[1] : gen_reg_rtx (mode
));
2237 emit_insn (gen_aux_truncdfsf2 (newreg
, operands
[1]));
2238 operands
[1] = newreg
;
2242 /* Handle the case where reload calls us with an invalid address;
2243 and the case of CONSTANT_P_RTX. */
2244 if (!VECTOR_MODE_P (mode
)
2245 && (! general_operand (operands
[1], mode
)
2246 || ! nonimmediate_operand (operands
[0], mode
)
2247 || GET_CODE (operands
[1]) == CONSTANT_P_RTX
))
2249 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2253 /* FIXME: In the long term, this switch statement should go away
2254 and be replaced by a sequence of tests based on things like
2260 if (CONSTANT_P (operands
[1])
2261 && GET_CODE (operands
[1]) != CONST_INT
)
2262 operands
[1] = force_const_mem (mode
, operands
[1]);
2268 if (CONSTANT_P (operands
[1])
2269 && ! easy_fp_constant (operands
[1], mode
))
2270 operands
[1] = force_const_mem (mode
, operands
[1]);
2277 if (CONSTANT_P (operands
[1])
2278 && !easy_vector_constant (operands
[1]))
2279 operands
[1] = force_const_mem (mode
, operands
[1]);
2284 /* Use default pattern for address of ELF small data */
2287 && DEFAULT_ABI
== ABI_V4
2288 && (GET_CODE (operands
[1]) == SYMBOL_REF
2289 || GET_CODE (operands
[1]) == CONST
)
2290 && small_data_operand (operands
[1], mode
))
2292 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2296 if (DEFAULT_ABI
== ABI_V4
2297 && mode
== Pmode
&& mode
== SImode
2298 && flag_pic
== 1 && got_operand (operands
[1], mode
))
2300 emit_insn (gen_movsi_got (operands
[0], operands
[1]));
2304 if ((TARGET_ELF
|| DEFAULT_ABI
== ABI_DARWIN
)
2305 && TARGET_NO_TOC
&& ! flag_pic
2307 && CONSTANT_P (operands
[1])
2308 && GET_CODE (operands
[1]) != HIGH
2309 && GET_CODE (operands
[1]) != CONST_INT
)
2311 rtx target
= (no_new_pseudos
? operands
[0] : gen_reg_rtx (mode
));
2313 /* If this is a function address on -mcall-aixdesc,
2314 convert it to the address of the descriptor. */
2315 if (DEFAULT_ABI
== ABI_AIX
2316 && GET_CODE (operands
[1]) == SYMBOL_REF
2317 && XSTR (operands
[1], 0)[0] == '.')
2319 const char *name
= XSTR (operands
[1], 0);
2321 while (*name
== '.')
2323 new_ref
= gen_rtx_SYMBOL_REF (Pmode
, name
);
2324 CONSTANT_POOL_ADDRESS_P (new_ref
)
2325 = CONSTANT_POOL_ADDRESS_P (operands
[1]);
2326 SYMBOL_REF_FLAG (new_ref
) = SYMBOL_REF_FLAG (operands
[1]);
2327 SYMBOL_REF_USED (new_ref
) = SYMBOL_REF_USED (operands
[1]);
2328 operands
[1] = new_ref
;
2331 if (DEFAULT_ABI
== ABI_DARWIN
)
2333 emit_insn (gen_macho_high (target
, operands
[1]));
2334 emit_insn (gen_macho_low (operands
[0], target
, operands
[1]));
2338 emit_insn (gen_elf_high (target
, operands
[1]));
2339 emit_insn (gen_elf_low (operands
[0], target
, operands
[1]));
2343 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2344 and we have put it in the TOC, we just need to make a TOC-relative
2347 && GET_CODE (operands
[1]) == SYMBOL_REF
2348 && CONSTANT_POOL_EXPR_P (operands
[1])
2349 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands
[1]),
2350 get_pool_mode (operands
[1])))
2352 operands
[1] = create_TOC_reference (operands
[1]);
2354 else if (mode
== Pmode
2355 && CONSTANT_P (operands
[1])
2356 && ((GET_CODE (operands
[1]) != CONST_INT
2357 && ! easy_fp_constant (operands
[1], mode
))
2358 || (GET_CODE (operands
[1]) == CONST_INT
2359 && num_insns_constant (operands
[1], mode
) > 2)
2360 || (GET_CODE (operands
[0]) == REG
2361 && FP_REGNO_P (REGNO (operands
[0]))))
2362 && GET_CODE (operands
[1]) != HIGH
2363 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands
[1])
2364 && ! TOC_RELATIVE_EXPR_P (operands
[1]))
2366 /* Emit a USE operation so that the constant isn't deleted if
2367 expensive optimizations are turned on because nobody
2368 references it. This should only be done for operands that
2369 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2370 This should not be done for operands that contain LABEL_REFs.
2371 For now, we just handle the obvious case. */
2372 if (GET_CODE (operands
[1]) != LABEL_REF
)
2373 emit_insn (gen_rtx_USE (VOIDmode
, operands
[1]));
2376 /* Darwin uses a special PIC legitimizer. */
2377 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)
2380 rs6000_machopic_legitimize_pic_address (operands
[1], mode
,
2382 if (operands
[0] != operands
[1])
2383 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2388 /* If we are to limit the number of things we put in the TOC and
2389 this is a symbol plus a constant we can add in one insn,
2390 just put the symbol in the TOC and add the constant. Don't do
2391 this if reload is in progress. */
2392 if (GET_CODE (operands
[1]) == CONST
2393 && TARGET_NO_SUM_IN_TOC
&& ! reload_in_progress
2394 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
2395 && add_operand (XEXP (XEXP (operands
[1], 0), 1), mode
)
2396 && (GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == LABEL_REF
2397 || GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
)
2398 && ! side_effects_p (operands
[0]))
2401 force_const_mem (mode
, XEXP (XEXP (operands
[1], 0), 0));
2402 rtx other
= XEXP (XEXP (operands
[1], 0), 1);
2404 sym
= force_reg (mode
, sym
);
2406 emit_insn (gen_addsi3 (operands
[0], sym
, other
));
2408 emit_insn (gen_adddi3 (operands
[0], sym
, other
));
2412 operands
[1] = force_const_mem (mode
, operands
[1]);
2415 && CONSTANT_POOL_EXPR_P (XEXP (operands
[1], 0))
2416 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2417 get_pool_constant (XEXP (operands
[1], 0)),
2418 get_pool_mode (XEXP (operands
[1], 0))))
2421 = gen_rtx_MEM (mode
,
2422 create_TOC_reference (XEXP (operands
[1], 0)));
2423 set_mem_alias_set (operands
[1], get_TOC_alias_set ());
2424 RTX_UNCHANGING_P (operands
[1]) = 1;
2430 if (GET_CODE (operands
[0]) == MEM
2431 && GET_CODE (XEXP (operands
[0], 0)) != REG
2432 && ! reload_in_progress
)
2434 = replace_equiv_address (operands
[0],
2435 copy_addr_to_reg (XEXP (operands
[0], 0)));
2437 if (GET_CODE (operands
[1]) == MEM
2438 && GET_CODE (XEXP (operands
[1], 0)) != REG
2439 && ! reload_in_progress
)
2441 = replace_equiv_address (operands
[1],
2442 copy_addr_to_reg (XEXP (operands
[1], 0)));
2449 /* Above, we may have called force_const_mem which may have returned
2450 an invalid address. If we can, fix this up; otherwise, reload will
2451 have to deal with it. */
2452 if (GET_CODE (operands
[1]) == MEM
2453 && ! memory_address_p (mode
, XEXP (operands
[1], 0))
2454 && ! reload_in_progress
)
2455 operands
[1] = adjust_address (operands
[1], mode
, 0);
2457 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
2461 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2462 for a call to a function whose data type is FNTYPE.
2463 For a library call, FNTYPE is 0.
2465 For incoming args we set the number of arguments in the prototype large
2466 so we never return a PARALLEL. */
2469 init_cumulative_args (cum
, fntype
, libname
, incoming
)
2470 CUMULATIVE_ARGS
*cum
;
2472 rtx libname ATTRIBUTE_UNUSED
;
2475 static CUMULATIVE_ARGS zero_cumulative
;
2477 *cum
= zero_cumulative
;
2479 cum
->fregno
= FP_ARG_MIN_REG
;
2480 cum
->vregno
= ALTIVEC_ARG_MIN_REG
;
2481 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
2482 cum
->call_cookie
= CALL_NORMAL
;
2483 cum
->sysv_gregno
= GP_ARG_MIN_REG
;
2486 cum
->nargs_prototype
= 1000; /* don't return a PARALLEL */
2488 else if (cum
->prototype
)
2489 cum
->nargs_prototype
= (list_length (TYPE_ARG_TYPES (fntype
)) - 1
2490 + (TYPE_MODE (TREE_TYPE (fntype
)) == BLKmode
2491 || RETURN_IN_MEMORY (TREE_TYPE (fntype
))));
2494 cum
->nargs_prototype
= 0;
2496 cum
->orig_nargs
= cum
->nargs_prototype
;
2498 /* Check for longcall's */
2499 if (fntype
&& lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
)))
2500 cum
->call_cookie
= CALL_LONG
;
2502 if (TARGET_DEBUG_ARG
)
2504 fprintf (stderr
, "\ninit_cumulative_args:");
2507 tree ret_type
= TREE_TYPE (fntype
);
2508 fprintf (stderr
, " ret code = %s,",
2509 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
2512 if (cum
->call_cookie
& CALL_LONG
)
2513 fprintf (stderr
, " longcall,");
2515 fprintf (stderr
, " proto = %d, nargs = %d\n",
2516 cum
->prototype
, cum
->nargs_prototype
);
2520 /* If defined, a C expression which determines whether, and in which
2521 direction, to pad out an argument with extra space. The value
2522 should be of type `enum direction': either `upward' to pad above
2523 the argument, `downward' to pad below, or `none' to inhibit
2526 For the AIX ABI structs are always stored left shifted in their
2530 function_arg_padding (mode
, type
)
2531 enum machine_mode mode
;
2534 if (type
!= 0 && AGGREGATE_TYPE_P (type
))
2537 /* This is the default definition. */
2538 return (! BYTES_BIG_ENDIAN
2541 ? (type
&& TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
2542 && int_size_in_bytes (type
) < (PARM_BOUNDARY
/ BITS_PER_UNIT
))
2543 : GET_MODE_BITSIZE (mode
) < PARM_BOUNDARY
)
2544 ? downward
: upward
));
2547 /* If defined, a C expression that gives the alignment boundary, in bits,
2548 of an argument with the specified mode and type. If it is not defined,
2549 PARM_BOUNDARY is used for all arguments.
2551 V.4 wants long longs to be double word aligned. */
2554 function_arg_boundary (mode
, type
)
2555 enum machine_mode mode
;
2556 tree type ATTRIBUTE_UNUSED
;
2558 if (DEFAULT_ABI
== ABI_V4
&& (mode
== DImode
|| mode
== DFmode
))
2560 else if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
2563 return PARM_BOUNDARY
;
2566 /* Update the data in CUM to advance over an argument
2567 of mode MODE and data type TYPE.
2568 (TYPE is null for libcalls where that information may not be available.) */
2571 function_arg_advance (cum
, mode
, type
, named
)
2572 CUMULATIVE_ARGS
*cum
;
2573 enum machine_mode mode
;
2577 cum
->nargs_prototype
--;
2579 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
2581 if (cum
->vregno
<= ALTIVEC_ARG_MAX_REG
&& cum
->nargs_prototype
>= 0)
2584 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
2586 else if (DEFAULT_ABI
== ABI_V4
)
2588 if (TARGET_HARD_FLOAT
2589 && (mode
== SFmode
|| mode
== DFmode
))
2591 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
2596 cum
->words
+= cum
->words
& 1;
2597 cum
->words
+= RS6000_ARG_SIZE (mode
, type
);
2603 int gregno
= cum
->sysv_gregno
;
2605 /* Aggregates and IEEE quad get passed by reference. */
2606 if ((type
&& AGGREGATE_TYPE_P (type
))
2610 n_words
= RS6000_ARG_SIZE (mode
, type
);
2612 /* Long long is put in odd registers. */
2613 if (n_words
== 2 && (gregno
& 1) == 0)
2616 /* Long long is not split between registers and stack. */
2617 if (gregno
+ n_words
- 1 > GP_ARG_MAX_REG
)
2619 /* Long long is aligned on the stack. */
2621 cum
->words
+= cum
->words
& 1;
2622 cum
->words
+= n_words
;
2625 /* Note: continuing to accumulate gregno past when we've started
2626 spilling to the stack indicates the fact that we've started
2627 spilling to the stack to expand_builtin_saveregs. */
2628 cum
->sysv_gregno
= gregno
+ n_words
;
2631 if (TARGET_DEBUG_ARG
)
2633 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
2634 cum
->words
, cum
->fregno
);
2635 fprintf (stderr
, "gregno = %2d, nargs = %4d, proto = %d, ",
2636 cum
->sysv_gregno
, cum
->nargs_prototype
, cum
->prototype
);
2637 fprintf (stderr
, "mode = %4s, named = %d\n",
2638 GET_MODE_NAME (mode
), named
);
2643 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
2644 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
2646 cum
->words
+= align
+ RS6000_ARG_SIZE (mode
, type
);
2648 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
&& TARGET_HARD_FLOAT
)
2651 if (TARGET_DEBUG_ARG
)
2653 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
2654 cum
->words
, cum
->fregno
);
2655 fprintf (stderr
, "nargs = %4d, proto = %d, mode = %4s, ",
2656 cum
->nargs_prototype
, cum
->prototype
, GET_MODE_NAME (mode
));
2657 fprintf (stderr
, "named = %d, align = %d\n", named
, align
);
2662 /* Determine where to put an argument to a function.
2663 Value is zero to push the argument on the stack,
2664 or a hard register in which to store the argument.
2666 MODE is the argument's machine mode.
2667 TYPE is the data type of the argument (as a tree).
2668 This is null for libcalls where that information may
2670 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2671 the preceding args and about the function being called.
2672 NAMED is nonzero if this argument is a named parameter
2673 (otherwise it is an extra parameter matching an ellipsis).
2675 On RS/6000 the first eight words of non-FP are normally in registers
2676 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
2677 Under V.4, the first 8 FP args are in registers.
2679 If this is floating-point and no prototype is specified, we use
2680 both an FP and integer register (or possibly FP reg and stack). Library
2681 functions (when TYPE is zero) always have the proper types for args,
2682 so we can pass the FP value just in one register. emit_library_function
2683 doesn't support PARALLEL anyway. */
2686 function_arg (cum
, mode
, type
, named
)
2687 CUMULATIVE_ARGS
*cum
;
2688 enum machine_mode mode
;
2692 enum rs6000_abi abi
= DEFAULT_ABI
;
2694 /* Return a marker to indicate whether CR1 needs to set or clear the
2695 bit that V.4 uses to say fp args were passed in registers.
2696 Assume that we don't need the marker for software floating point,
2697 or compiler generated library calls. */
2698 if (mode
== VOIDmode
)
2701 && TARGET_HARD_FLOAT
2702 && cum
->nargs_prototype
< 0
2703 && type
&& (cum
->prototype
|| TARGET_NO_PROTOTYPE
))
2705 return GEN_INT (cum
->call_cookie
2706 | ((cum
->fregno
== FP_ARG_MIN_REG
)
2707 ? CALL_V4_SET_FP_ARGS
2708 : CALL_V4_CLEAR_FP_ARGS
));
2711 return GEN_INT (cum
->call_cookie
);
2714 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
2716 if (named
&& cum
->vregno
<= ALTIVEC_ARG_MAX_REG
)
2717 return gen_rtx_REG (mode
, cum
->vregno
);
2721 else if (abi
== ABI_V4
)
2723 if (TARGET_HARD_FLOAT
2724 && (mode
== SFmode
|| mode
== DFmode
))
2726 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
2727 return gen_rtx_REG (mode
, cum
->fregno
);
2734 int gregno
= cum
->sysv_gregno
;
2736 /* Aggregates and IEEE quad get passed by reference. */
2737 if ((type
&& AGGREGATE_TYPE_P (type
))
2741 n_words
= RS6000_ARG_SIZE (mode
, type
);
2743 /* Long long is put in odd registers. */
2744 if (n_words
== 2 && (gregno
& 1) == 0)
2747 /* Long long is not split between registers and stack. */
2748 if (gregno
+ n_words
- 1 <= GP_ARG_MAX_REG
)
2749 return gen_rtx_REG (mode
, gregno
);
2756 int align
= (TARGET_32BIT
&& (cum
->words
& 1) != 0
2757 && function_arg_boundary (mode
, type
) == 64) ? 1 : 0;
2758 int align_words
= cum
->words
+ align
;
2760 if (type
&& TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
2763 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
))
2766 || ((cum
->nargs_prototype
> 0)
2767 /* IBM AIX extended its linkage convention definition always
2768 to require FP args after register save area hole on the
2770 && (DEFAULT_ABI
!= ABI_AIX
2772 || (align_words
< GP_ARG_NUM_REG
))))
2773 return gen_rtx_REG (mode
, cum
->fregno
);
2775 return gen_rtx_PARALLEL (mode
,
2777 gen_rtx_EXPR_LIST (VOIDmode
,
2778 ((align_words
>= GP_ARG_NUM_REG
)
2781 + RS6000_ARG_SIZE (mode
, type
)
2783 /* If this is partially on the stack, then
2784 we only include the portion actually
2785 in registers here. */
2786 ? gen_rtx_REG (SImode
,
2787 GP_ARG_MIN_REG
+ align_words
)
2788 : gen_rtx_REG (mode
,
2789 GP_ARG_MIN_REG
+ align_words
))),
2791 gen_rtx_EXPR_LIST (VOIDmode
,
2792 gen_rtx_REG (mode
, cum
->fregno
),
2795 else if (align_words
< GP_ARG_NUM_REG
)
2796 return gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
2802 /* For an arg passed partly in registers and partly in memory,
2803 this is the number of registers used.
2804 For args passed entirely in registers or entirely in memory, zero. */
2807 function_arg_partial_nregs (cum
, mode
, type
, named
)
2808 CUMULATIVE_ARGS
*cum
;
2809 enum machine_mode mode
;
2811 int named ATTRIBUTE_UNUSED
;
2813 if (DEFAULT_ABI
== ABI_V4
)
2816 if (USE_FP_FOR_ARG_P (*cum
, mode
, type
)
2817 || USE_ALTIVEC_FOR_ARG_P (*cum
, mode
, type
))
2819 if (cum
->nargs_prototype
>= 0)
2823 if (cum
->words
< GP_ARG_NUM_REG
2824 && GP_ARG_NUM_REG
< (cum
->words
+ RS6000_ARG_SIZE (mode
, type
)))
2826 int ret
= GP_ARG_NUM_REG
- cum
->words
;
2827 if (ret
&& TARGET_DEBUG_ARG
)
2828 fprintf (stderr
, "function_arg_partial_nregs: %d\n", ret
);
2836 /* A C expression that indicates when an argument must be passed by
2837 reference. If nonzero for an argument, a copy of that argument is
2838 made in memory and a pointer to the argument is passed instead of
2839 the argument itself. The pointer is passed in whatever way is
2840 appropriate for passing a pointer to that type.
2842 Under V.4, structures and unions are passed by reference. */
2845 function_arg_pass_by_reference (cum
, mode
, type
, named
)
2846 CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
;
2847 enum machine_mode mode ATTRIBUTE_UNUSED
;
2849 int named ATTRIBUTE_UNUSED
;
2851 if (DEFAULT_ABI
== ABI_V4
2852 && ((type
&& AGGREGATE_TYPE_P (type
))
2855 if (TARGET_DEBUG_ARG
)
2856 fprintf (stderr
, "function_arg_pass_by_reference: aggregate\n");
2864 /* Perform any needed actions needed for a function that is receiving a
2865 variable number of arguments.
2869 MODE and TYPE are the mode and type of the current parameter.
2871 PRETEND_SIZE is a variable that should be set to the amount of stack
2872 that must be pushed by the prolog to pretend that our caller pushed
2875 Normally, this macro will push all remaining incoming registers on the
2876 stack and set PRETEND_SIZE to the length of the registers pushed. */
2879 setup_incoming_varargs (cum
, mode
, type
, pretend_size
, no_rtl
)
2880 CUMULATIVE_ARGS
*cum
;
2881 enum machine_mode mode
;
2887 CUMULATIVE_ARGS next_cum
;
2888 int reg_size
= TARGET_32BIT
? 4 : 8;
2889 rtx save_area
= NULL_RTX
, mem
;
2890 int first_reg_offset
, set
;
2894 fntype
= TREE_TYPE (current_function_decl
);
2895 stdarg_p
= (TYPE_ARG_TYPES (fntype
) != 0
2896 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
2897 != void_type_node
));
2899 /* For varargs, we do not want to skip the dummy va_dcl argument.
2900 For stdargs, we do want to skip the last named argument. */
2903 function_arg_advance (&next_cum
, mode
, type
, 1);
2905 if (DEFAULT_ABI
== ABI_V4
)
2907 /* Indicate to allocate space on the stack for varargs save area. */
2908 /* ??? Does this really have to be located at a magic spot on the
2909 stack, or can we allocate this with assign_stack_local instead. */
2910 cfun
->machine
->sysv_varargs_p
= 1;
2912 save_area
= plus_constant (virtual_stack_vars_rtx
,
2913 - RS6000_VARARGS_SIZE
);
2915 first_reg_offset
= next_cum
.sysv_gregno
- GP_ARG_MIN_REG
;
2919 first_reg_offset
= next_cum
.words
;
2920 save_area
= virtual_incoming_args_rtx
;
2921 cfun
->machine
->sysv_varargs_p
= 0;
2923 if (MUST_PASS_IN_STACK (mode
, type
))
2924 first_reg_offset
+= RS6000_ARG_SIZE (TYPE_MODE (type
), type
);
2927 set
= get_varargs_alias_set ();
2928 if (! no_rtl
&& first_reg_offset
< GP_ARG_NUM_REG
)
2930 mem
= gen_rtx_MEM (BLKmode
,
2931 plus_constant (save_area
,
2932 first_reg_offset
* reg_size
)),
2933 set_mem_alias_set (mem
, set
);
2934 set_mem_align (mem
, BITS_PER_WORD
);
2937 (GP_ARG_MIN_REG
+ first_reg_offset
, mem
,
2938 GP_ARG_NUM_REG
- first_reg_offset
,
2939 (GP_ARG_NUM_REG
- first_reg_offset
) * UNITS_PER_WORD
);
2941 /* ??? Does ABI_V4 need this at all? */
2942 *pretend_size
= (GP_ARG_NUM_REG
- first_reg_offset
) * UNITS_PER_WORD
;
2945 /* Save FP registers if needed. */
2946 if (DEFAULT_ABI
== ABI_V4
2947 && TARGET_HARD_FLOAT
&& ! no_rtl
2948 && next_cum
.fregno
<= FP_ARG_V4_MAX_REG
)
2950 int fregno
= next_cum
.fregno
;
2951 rtx cr1
= gen_rtx_REG (CCmode
, CR1_REGNO
);
2952 rtx lab
= gen_label_rtx ();
2953 int off
= (GP_ARG_NUM_REG
* reg_size
) + ((fregno
- FP_ARG_MIN_REG
) * 8);
2955 emit_jump_insn (gen_rtx_SET (VOIDmode
,
2957 gen_rtx_IF_THEN_ELSE (VOIDmode
,
2958 gen_rtx_NE (VOIDmode
, cr1
,
2960 gen_rtx_LABEL_REF (VOIDmode
, lab
),
2963 while (fregno
<= FP_ARG_V4_MAX_REG
)
2965 mem
= gen_rtx_MEM (DFmode
, plus_constant (save_area
, off
));
2966 set_mem_alias_set (mem
, set
);
2967 emit_move_insn (mem
, gen_rtx_REG (DFmode
, fregno
));
2976 /* Create the va_list data type. */
2979 rs6000_build_va_list ()
2981 tree f_gpr
, f_fpr
, f_ovf
, f_sav
, record
, type_decl
;
2983 /* For AIX, prefer 'char *' because that's what the system
2984 header files like. */
2985 if (DEFAULT_ABI
!= ABI_V4
)
2986 return build_pointer_type (char_type_node
);
2988 record
= make_lang_type (RECORD_TYPE
);
2989 type_decl
= build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
2991 f_gpr
= build_decl (FIELD_DECL
, get_identifier ("gpr"),
2992 unsigned_char_type_node
);
2993 f_fpr
= build_decl (FIELD_DECL
, get_identifier ("fpr"),
2994 unsigned_char_type_node
);
2995 f_ovf
= build_decl (FIELD_DECL
, get_identifier ("overflow_arg_area"),
2997 f_sav
= build_decl (FIELD_DECL
, get_identifier ("reg_save_area"),
3000 DECL_FIELD_CONTEXT (f_gpr
) = record
;
3001 DECL_FIELD_CONTEXT (f_fpr
) = record
;
3002 DECL_FIELD_CONTEXT (f_ovf
) = record
;
3003 DECL_FIELD_CONTEXT (f_sav
) = record
;
3005 TREE_CHAIN (record
) = type_decl
;
3006 TYPE_NAME (record
) = type_decl
;
3007 TYPE_FIELDS (record
) = f_gpr
;
3008 TREE_CHAIN (f_gpr
) = f_fpr
;
3009 TREE_CHAIN (f_fpr
) = f_ovf
;
3010 TREE_CHAIN (f_ovf
) = f_sav
;
3012 layout_type (record
);
3014 /* The correct type is an array type of one element. */
3015 return build_array_type (record
, build_index_type (size_zero_node
));
3018 /* Implement va_start. */
3021 rs6000_va_start (stdarg_p
, valist
, nextarg
)
3026 HOST_WIDE_INT words
, n_gpr
, n_fpr
;
3027 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
3028 tree gpr
, fpr
, ovf
, sav
, t
;
3030 /* Only SVR4 needs something special. */
3031 if (DEFAULT_ABI
!= ABI_V4
)
3033 std_expand_builtin_va_start (stdarg_p
, valist
, nextarg
);
3037 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
3038 f_fpr
= TREE_CHAIN (f_gpr
);
3039 f_ovf
= TREE_CHAIN (f_fpr
);
3040 f_sav
= TREE_CHAIN (f_ovf
);
3042 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
3043 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
3044 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
3045 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
3046 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
3048 /* Count number of gp and fp argument registers used. */
3049 words
= current_function_args_info
.words
;
3050 n_gpr
= current_function_args_info
.sysv_gregno
- GP_ARG_MIN_REG
;
3051 n_fpr
= current_function_args_info
.fregno
- FP_ARG_MIN_REG
;
3053 if (TARGET_DEBUG_ARG
)
3055 fputs ("va_start: words = ", stderr
);
3056 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, words
);
3057 fputs (", n_gpr = ", stderr
);
3058 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_gpr
);
3059 fputs (", n_fpr = ", stderr
);
3060 fprintf (stderr
, HOST_WIDE_INT_PRINT_DEC
, n_fpr
);
3061 putc ('\n', stderr
);
3064 t
= build (MODIFY_EXPR
, TREE_TYPE (gpr
), gpr
, build_int_2 (n_gpr
, 0));
3065 TREE_SIDE_EFFECTS (t
) = 1;
3066 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3068 t
= build (MODIFY_EXPR
, TREE_TYPE (fpr
), fpr
, build_int_2 (n_fpr
, 0));
3069 TREE_SIDE_EFFECTS (t
) = 1;
3070 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3072 /* Find the overflow area. */
3073 t
= make_tree (TREE_TYPE (ovf
), virtual_incoming_args_rtx
);
3075 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), t
,
3076 build_int_2 (words
* UNITS_PER_WORD
, 0));
3077 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
3078 TREE_SIDE_EFFECTS (t
) = 1;
3079 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3081 /* Find the register save area. */
3082 t
= make_tree (TREE_TYPE (sav
), virtual_stack_vars_rtx
);
3083 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
3084 build_int_2 (-RS6000_VARARGS_SIZE
, -1));
3085 t
= build (MODIFY_EXPR
, TREE_TYPE (sav
), sav
, t
);
3086 TREE_SIDE_EFFECTS (t
) = 1;
3087 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3090 /* Implement va_arg. */
3093 rs6000_va_arg (valist
, type
)
3096 tree f_gpr
, f_fpr
, f_ovf
, f_sav
;
3097 tree gpr
, fpr
, ovf
, sav
, reg
, t
, u
;
3098 int indirect_p
, size
, rsize
, n_reg
, sav_ofs
, sav_scale
;
3099 rtx lab_false
, lab_over
, addr_rtx
, r
;
3101 /* For AIX, the rule is that structures are passed left-aligned in
3102 their stack slot. However, GCC does not presently do this:
3103 structures which are the same size as integer types are passed
3104 right-aligned, as if they were in fact integers. This only
3105 matters for structures of size 1 or 2, or 4 when TARGET_64BIT. */
3106 if (DEFAULT_ABI
!= ABI_V4
)
3108 HOST_WIDE_INT align
, rounded_size
;
3109 enum machine_mode mode
;
3112 /* Compute the rounded size of the type. */
3113 align
= PARM_BOUNDARY
/ BITS_PER_UNIT
;
3114 rounded_size
= (((int_size_in_bytes (type
) + align
- 1) / align
)
3119 mode
= TYPE_MODE (type
);
3120 if (mode
!= BLKmode
)
3123 adj
= TREE_INT_CST_LOW (TYPE_SIZE (type
)) / BITS_PER_UNIT
;
3124 if (rounded_size
> align
)
3127 addr_tree
= build (PLUS_EXPR
, TREE_TYPE (addr_tree
), addr_tree
,
3128 build_int_2 (rounded_size
- adj
, 0));
3131 addr_rtx
= expand_expr (addr_tree
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
3132 addr_rtx
= copy_to_reg (addr_rtx
);
3134 /* Compute new value for AP. */
3135 t
= build (MODIFY_EXPR
, TREE_TYPE (valist
), valist
,
3136 build (PLUS_EXPR
, TREE_TYPE (valist
), valist
,
3137 build_int_2 (rounded_size
, 0)));
3138 TREE_SIDE_EFFECTS (t
) = 1;
3139 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3144 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
3145 f_fpr
= TREE_CHAIN (f_gpr
);
3146 f_ovf
= TREE_CHAIN (f_fpr
);
3147 f_sav
= TREE_CHAIN (f_ovf
);
3149 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
3150 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
);
3151 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
);
3152 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
);
3153 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
);
3155 size
= int_size_in_bytes (type
);
3156 rsize
= (size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
3158 if (AGGREGATE_TYPE_P (type
) || TYPE_MODE (type
) == TFmode
)
3160 /* Aggregates and long doubles are passed by reference. */
3166 size
= rsize
= UNITS_PER_WORD
;
3168 else if (FLOAT_TYPE_P (type
) && ! TARGET_SOFT_FLOAT
)
3170 /* FP args go in FP registers, if present. */
3179 /* Otherwise into GP registers. */
3187 /* Pull the value out of the saved registers ... */
3189 lab_false
= gen_label_rtx ();
3190 lab_over
= gen_label_rtx ();
3191 addr_rtx
= gen_reg_rtx (Pmode
);
3193 emit_cmp_and_jump_insns (expand_expr (reg
, NULL_RTX
, QImode
, EXPAND_NORMAL
),
3194 GEN_INT (8 - n_reg
+ 1), GE
, const1_rtx
, QImode
, 1,
3197 /* Long long is aligned in the registers. */
3200 u
= build (BIT_AND_EXPR
, TREE_TYPE (reg
), reg
,
3201 build_int_2 (n_reg
- 1, 0));
3202 u
= build (PLUS_EXPR
, TREE_TYPE (reg
), reg
, u
);
3203 u
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, u
);
3204 TREE_SIDE_EFFECTS (u
) = 1;
3205 expand_expr (u
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3209 t
= build (PLUS_EXPR
, ptr_type_node
, sav
, build_int_2 (sav_ofs
, 0));
3213 u
= build (POSTINCREMENT_EXPR
, TREE_TYPE (reg
), reg
, build_int_2 (n_reg
, 0));
3214 TREE_SIDE_EFFECTS (u
) = 1;
3216 u
= build1 (CONVERT_EXPR
, integer_type_node
, u
);
3217 TREE_SIDE_EFFECTS (u
) = 1;
3219 u
= build (MULT_EXPR
, integer_type_node
, u
, build_int_2 (sav_scale
, 0));
3220 TREE_SIDE_EFFECTS (u
) = 1;
3222 t
= build (PLUS_EXPR
, ptr_type_node
, t
, u
);
3223 TREE_SIDE_EFFECTS (t
) = 1;
3225 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
3227 emit_move_insn (addr_rtx
, r
);
3229 emit_jump_insn (gen_jump (lab_over
));
3231 emit_label (lab_false
);
3233 /* ... otherwise out of the overflow area. */
3235 /* Make sure we don't find reg 7 for the next int arg. */
3238 t
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, build_int_2 (8, 0));
3239 TREE_SIDE_EFFECTS (t
) = 1;
3240 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3243 /* Care for on-stack alignment if needed. */
3248 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), ovf
, build_int_2 (7, 0));
3249 t
= build (BIT_AND_EXPR
, TREE_TYPE (t
), t
, build_int_2 (-8, -1));
3253 r
= expand_expr (t
, addr_rtx
, Pmode
, EXPAND_NORMAL
);
3255 emit_move_insn (addr_rtx
, r
);
3257 t
= build (PLUS_EXPR
, TREE_TYPE (t
), t
, build_int_2 (size
, 0));
3258 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
3259 TREE_SIDE_EFFECTS (t
) = 1;
3260 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3262 emit_label (lab_over
);
3266 r
= gen_rtx_MEM (Pmode
, addr_rtx
);
3267 set_mem_alias_set (r
, get_varargs_alias_set ());
3268 emit_move_insn (addr_rtx
, r
);
3276 #define def_builtin(MASK, NAME, TYPE, CODE) \
3278 if ((MASK) & target_flags) \
3279 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL); \
3282 struct builtin_description
3284 const unsigned int mask
;
3285 const enum insn_code icode
;
3286 const char *const name
;
3287 const enum rs6000_builtins code
;
3290 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3292 static const struct builtin_description bdesc_3arg
[] =
3294 { MASK_ALTIVEC
, CODE_FOR_altivec_vmaddfp
, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP
},
3295 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhaddshs
, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS
},
3296 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhraddshs
, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS
},
3297 { MASK_ALTIVEC
, CODE_FOR_altivec_vmladduhm
, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM
},
3298 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumubm
, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM
},
3299 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsummbm
, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM
},
3300 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhm
, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM
},
3301 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshm
, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM
},
3302 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhs
, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS
},
3303 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshs
, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS
},
3304 { MASK_ALTIVEC
, CODE_FOR_altivec_vnmsubfp
, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP
},
3305 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4sf
, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF
},
3306 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4si
, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI
},
3307 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_8hi
, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI
},
3308 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_16qi
, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI
},
3309 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4sf
, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF
},
3310 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4si
, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI
},
3311 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_8hi
, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI
},
3312 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_16qi
, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI
},
3313 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_16qi
, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI
},
3314 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_8hi
, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI
},
3315 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4si
, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI
},
3316 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4sf
, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF
},
3319 /* DST operations: void foo (void *, const int, const char). */
3321 static const struct builtin_description bdesc_dst
[] =
3323 { MASK_ALTIVEC
, CODE_FOR_altivec_dst
, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST
},
3324 { MASK_ALTIVEC
, CODE_FOR_altivec_dstt
, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT
},
3325 { MASK_ALTIVEC
, CODE_FOR_altivec_dstst
, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST
},
3326 { MASK_ALTIVEC
, CODE_FOR_altivec_dststt
, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT
}
3329 /* Simple binary operations: VECc = foo (VECa, VECb). */
3331 static const struct builtin_description bdesc_2arg
[] =
3333 { MASK_ALTIVEC
, CODE_FOR_addv16qi3
, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM
},
3334 { MASK_ALTIVEC
, CODE_FOR_addv8hi3
, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM
},
3335 { MASK_ALTIVEC
, CODE_FOR_addv4si3
, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM
},
3336 { MASK_ALTIVEC
, CODE_FOR_addv4sf3
, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP
},
3337 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddcuw
, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW
},
3338 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddubs
, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS
},
3339 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsbs
, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS
},
3340 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduhs
, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS
},
3341 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddshs
, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS
},
3342 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduws
, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS
},
3343 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsws
, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS
},
3344 { MASK_ALTIVEC
, CODE_FOR_andv4si3
, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND
},
3345 { MASK_ALTIVEC
, CODE_FOR_altivec_vandc
, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC
},
3346 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgub
, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB
},
3347 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsb
, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB
},
3348 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguh
, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH
},
3349 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsh
, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH
},
3350 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguw
, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW
},
3351 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsw
, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW
},
3352 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfux
, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX
},
3353 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfsx
, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX
},
3354 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpbfp
, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP
},
3355 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequb
, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB
},
3356 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequh
, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH
},
3357 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequw
, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW
},
3358 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpeqfp
, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP
},
3359 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgefp
, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP
},
3360 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtub
, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB
},
3361 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsb
, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB
},
3362 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuh
, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH
},
3363 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsh
, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH
},
3364 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuw
, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW
},
3365 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsw
, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW
},
3366 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtfp
, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP
},
3367 { MASK_ALTIVEC
, CODE_FOR_altivec_vctsxs
, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS
},
3368 { MASK_ALTIVEC
, CODE_FOR_altivec_vctuxs
, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS
},
3369 { MASK_ALTIVEC
, CODE_FOR_umaxv16qi3
, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB
},
3370 { MASK_ALTIVEC
, CODE_FOR_smaxv16qi3
, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB
},
3371 { MASK_ALTIVEC
, CODE_FOR_uminv8hi3
, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH
},
3372 { MASK_ALTIVEC
, CODE_FOR_sminv8hi3
, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH
},
3373 { MASK_ALTIVEC
, CODE_FOR_uminv4si3
, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW
},
3374 { MASK_ALTIVEC
, CODE_FOR_sminv4si3
, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW
},
3375 { MASK_ALTIVEC
, CODE_FOR_sminv4sf3
, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP
},
3376 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghb
, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB
},
3377 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghh
, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH
},
3378 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghw
, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW
},
3379 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglb
, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB
},
3380 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglh
, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH
},
3381 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglw
, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW
},
3382 { MASK_ALTIVEC
, CODE_FOR_uminv16qi3
, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB
},
3383 { MASK_ALTIVEC
, CODE_FOR_sminv16qi3
, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB
},
3384 { MASK_ALTIVEC
, CODE_FOR_uminv8hi3
, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH
},
3385 { MASK_ALTIVEC
, CODE_FOR_sminv8hi3
, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH
},
3386 { MASK_ALTIVEC
, CODE_FOR_uminv4si3
, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW
},
3387 { MASK_ALTIVEC
, CODE_FOR_sminv4si3
, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW
},
3388 { MASK_ALTIVEC
, CODE_FOR_sminv4sf3
, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP
},
3389 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleub
, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB
},
3390 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesb
, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB
},
3391 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleuh
, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH
},
3392 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesh
, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH
},
3393 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuloub
, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB
},
3394 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosb
, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB
},
3395 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulouh
, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH
},
3396 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosh
, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH
},
3397 { MASK_ALTIVEC
, CODE_FOR_altivec_vnor
, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR
},
3398 { MASK_ALTIVEC
, CODE_FOR_iorv4si3
, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR
},
3399 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhum
, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM
},
3400 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwum
, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM
},
3401 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkpx
, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX
},
3402 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhss
, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS
},
3403 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshss
, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS
},
3404 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwss
, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS
},
3405 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswss
, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS
},
3406 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhus
, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS
},
3407 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshus
, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS
},
3408 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwus
, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS
},
3409 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswus
, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS
},
3410 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlb
, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB
},
3411 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlh
, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH
},
3412 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlw
, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW
},
3413 { MASK_ALTIVEC
, CODE_FOR_altivec_vslb
, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB
},
3414 { MASK_ALTIVEC
, CODE_FOR_altivec_vslh
, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH
},
3415 { MASK_ALTIVEC
, CODE_FOR_altivec_vslw
, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW
},
3416 { MASK_ALTIVEC
, CODE_FOR_altivec_vsl
, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL
},
3417 { MASK_ALTIVEC
, CODE_FOR_altivec_vslo
, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO
},
3418 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltb
, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB
},
3419 { MASK_ALTIVEC
, CODE_FOR_altivec_vsplth
, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH
},
3420 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltw
, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW
},
3421 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrb
, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB
},
3422 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrh
, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH
},
3423 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrw
, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW
},
3424 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrab
, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB
},
3425 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrah
, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH
},
3426 { MASK_ALTIVEC
, CODE_FOR_altivec_vsraw
, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW
},
3427 { MASK_ALTIVEC
, CODE_FOR_altivec_vsr
, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR
},
3428 { MASK_ALTIVEC
, CODE_FOR_altivec_vsro
, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO
},
3429 { MASK_ALTIVEC
, CODE_FOR_subv16qi3
, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM
},
3430 { MASK_ALTIVEC
, CODE_FOR_subv8hi3
, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM
},
3431 { MASK_ALTIVEC
, CODE_FOR_subv4si3
, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM
},
3432 { MASK_ALTIVEC
, CODE_FOR_subv4sf3
, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP
},
3433 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubcuw
, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW
},
3434 { MASK_ALTIVEC
, CODE_FOR_altivec_vsububs
, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS
},
3435 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsbs
, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS
},
3436 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuhs
, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS
},
3437 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubshs
, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS
},
3438 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuws
, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS
},
3439 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsws
, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS
},
3440 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4ubs
, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS
},
3441 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4sbs
, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS
},
3442 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4shs
, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS
},
3443 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum2sws
, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS
},
3444 { MASK_ALTIVEC
, CODE_FOR_altivec_vsumsws
, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS
},
3445 { MASK_ALTIVEC
, CODE_FOR_xorv4si3
, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR
},
3448 /* AltiVec predicates. */
3450 struct builtin_description_predicates
3452 const unsigned int mask
;
3453 const enum insn_code icode
;
3455 const char *const name
;
3456 const enum rs6000_builtins code
;
3459 static const struct builtin_description_predicates bdesc_altivec_preds
[] =
3461 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P
},
3462 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P
},
3463 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P
},
3464 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P
},
3465 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P
},
3466 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P
},
3467 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P
},
3468 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P
},
3469 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P
},
3470 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P
},
3471 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P
},
3472 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P
},
3473 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P
}
3476 /* ABS* opreations. */
3478 static const struct builtin_description bdesc_abs
[] =
3480 { MASK_ALTIVEC
, CODE_FOR_absv4si2
, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI
},
3481 { MASK_ALTIVEC
, CODE_FOR_absv8hi2
, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI
},
3482 { MASK_ALTIVEC
, CODE_FOR_absv4sf2
, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF
},
3483 { MASK_ALTIVEC
, CODE_FOR_absv16qi2
, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI
},
3484 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v4si
, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI
},
3485 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v8hi
, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI
},
3486 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v16qi
, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI
}
3489 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
3492 static const struct builtin_description bdesc_1arg
[] =
3494 { MASK_ALTIVEC
, CODE_FOR_altivec_vexptefp
, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP
},
3495 { MASK_ALTIVEC
, CODE_FOR_altivec_vlogefp
, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP
},
3496 { MASK_ALTIVEC
, CODE_FOR_altivec_vrefp
, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP
},
3497 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfim
, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM
},
3498 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfin
, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN
},
3499 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfip
, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP
},
3500 { MASK_ALTIVEC
, CODE_FOR_ftruncv4sf2
, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ
},
3501 { MASK_ALTIVEC
, CODE_FOR_altivec_vrsqrtefp
, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP
},
3502 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisb
, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB
},
3503 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltish
, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH
},
3504 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisw
, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW
},
3505 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsb
, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB
},
3506 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhpx
, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX
},
3507 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsh
, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH
},
3508 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsb
, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB
},
3509 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklpx
, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX
},
3510 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsh
, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH
},
3514 altivec_expand_unop_builtin (icode
, arglist
, target
)
3515 enum insn_code icode
;
3520 tree arg0
= TREE_VALUE (arglist
);
3521 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
3522 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
3523 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
3525 /* If we got invalid arguments bail out before generating bad rtl. */
3526 if (arg0
== error_mark_node
)
3530 || GET_MODE (target
) != tmode
3531 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
3532 target
= gen_reg_rtx (tmode
);
3534 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
3535 op0
= copy_to_mode_reg (mode0
, op0
);
3537 pat
= GEN_FCN (icode
) (target
, op0
);
3546 altivec_expand_abs_builtin (icode
, arglist
, target
)
3547 enum insn_code icode
;
3551 rtx pat
, scratch1
, scratch2
;
3552 tree arg0
= TREE_VALUE (arglist
);
3553 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
3554 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
3555 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
3557 /* If we have invalid arguments, bail out before generating bad rtl. */
3558 if (arg0
== error_mark_node
)
3562 || GET_MODE (target
) != tmode
3563 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
3564 target
= gen_reg_rtx (tmode
);
3566 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
3567 op0
= copy_to_mode_reg (mode0
, op0
);
3569 scratch1
= gen_reg_rtx (mode0
);
3570 scratch2
= gen_reg_rtx (mode0
);
3572 pat
= GEN_FCN (icode
) (target
, op0
, scratch1
, scratch2
);
3581 altivec_expand_binop_builtin (icode
, arglist
, target
)
3582 enum insn_code icode
;
3587 tree arg0
= TREE_VALUE (arglist
);
3588 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
3589 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
3590 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
3591 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
3592 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
3593 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
3595 /* If we got invalid arguments bail out before generating bad rtl. */
3596 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
3600 || GET_MODE (target
) != tmode
3601 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
3602 target
= gen_reg_rtx (tmode
);
3604 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
3605 op0
= copy_to_mode_reg (mode0
, op0
);
3606 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
3607 op1
= copy_to_mode_reg (mode1
, op1
);
3609 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
3618 altivec_expand_predicate_builtin (icode
, opcode
, arglist
, target
)
3619 enum insn_code icode
;
3625 tree cr6_form
= TREE_VALUE (arglist
);
3626 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
3627 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
3628 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
3629 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
3630 enum machine_mode tmode
= SImode
;
3631 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
3632 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
3635 if (TREE_CODE (cr6_form
) != INTEGER_CST
)
3637 error ("argument 1 of __builtin_altivec_predicate must be a constant");
3641 cr6_form_int
= TREE_INT_CST_LOW (cr6_form
);
3646 /* If we have invalid arguments, bail out before generating bad rtl. */
3647 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
3651 || GET_MODE (target
) != tmode
3652 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
3653 target
= gen_reg_rtx (tmode
);
3655 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
3656 op0
= copy_to_mode_reg (mode0
, op0
);
3657 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
3658 op1
= copy_to_mode_reg (mode1
, op1
);
3660 scratch
= gen_reg_rtx (mode0
);
3662 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
,
3663 gen_rtx (SYMBOL_REF
, Pmode
, opcode
));
3668 /* The vec_any* and vec_all* predicates use the same opcodes for two
3669 different operations, but the bits in CR6 will be different
3670 depending on what information we want. So we have to play tricks
3671 with CR6 to get the right bits out.
3673 If you think this is disgusting, look at the specs for the
3674 AltiVec predicates. */
3676 switch (cr6_form_int
)
3679 emit_insn (gen_cr6_test_for_zero (target
));
3682 emit_insn (gen_cr6_test_for_zero_reverse (target
));
3685 emit_insn (gen_cr6_test_for_lt (target
));
3688 emit_insn (gen_cr6_test_for_lt_reverse (target
));
3691 error ("argument 1 of __builtin_altivec_predicate is out of range");
3699 altivec_expand_stv_builtin (icode
, arglist
)
3700 enum insn_code icode
;
3703 tree arg0
= TREE_VALUE (arglist
);
3704 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
3705 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
3706 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
3707 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
3708 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
3710 enum machine_mode mode0
= insn_data
[icode
].operand
[0].mode
;
3711 enum machine_mode mode1
= insn_data
[icode
].operand
[1].mode
;
3712 enum machine_mode mode2
= insn_data
[icode
].operand
[2].mode
;
3714 /* Invalid arguments. Bail before doing anything stoopid! */
3715 if (arg0
== error_mark_node
3716 || arg1
== error_mark_node
3717 || arg2
== error_mark_node
)
3720 if (! (*insn_data
[icode
].operand
[2].predicate
) (op0
, mode2
))
3721 op0
= copy_to_mode_reg (mode2
, op0
);
3722 if (! (*insn_data
[icode
].operand
[0].predicate
) (op1
, mode0
))
3723 op1
= copy_to_mode_reg (mode0
, op1
);
3724 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
3725 op2
= copy_to_mode_reg (mode1
, op2
);
3727 pat
= GEN_FCN (icode
) (op1
, op2
, op0
);
3734 altivec_expand_ternop_builtin (icode
, arglist
, target
)
3735 enum insn_code icode
;
3740 tree arg0
= TREE_VALUE (arglist
);
3741 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
3742 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
3743 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
3744 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
3745 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
3746 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
3747 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
3748 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
3749 enum machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
3751 /* If we got invalid arguments bail out before generating bad rtl. */
3752 if (arg0
== error_mark_node
3753 || arg1
== error_mark_node
3754 || arg2
== error_mark_node
)
3758 || GET_MODE (target
) != tmode
3759 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
3760 target
= gen_reg_rtx (tmode
);
3762 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
3763 op0
= copy_to_mode_reg (mode0
, op0
);
3764 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
3765 op1
= copy_to_mode_reg (mode1
, op1
);
3766 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
3767 op2
= copy_to_mode_reg (mode2
, op2
);
3769 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
3777 altivec_expand_builtin (exp
, target
)
3781 struct builtin_description
*d
;
3782 struct builtin_description_predicates
*dp
;
3784 enum insn_code icode
;
3785 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
3786 tree arglist
= TREE_OPERAND (exp
, 1);
3787 tree arg0
, arg1
, arg2
;
3788 rtx op0
, op1
, op2
, pat
;
3789 enum machine_mode tmode
, mode0
, mode1
, mode2
;
3790 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
3794 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi
:
3795 icode
= CODE_FOR_altivec_lvx_16qi
;
3796 arg0
= TREE_VALUE (arglist
);
3797 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
3798 tmode
= insn_data
[icode
].operand
[0].mode
;
3799 mode0
= insn_data
[icode
].operand
[1].mode
;
3802 || GET_MODE (target
) != tmode
3803 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
3804 target
= gen_reg_rtx (tmode
);
3806 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
3807 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
3809 pat
= GEN_FCN (icode
) (target
, op0
);
3815 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi
:
3816 icode
= CODE_FOR_altivec_lvx_8hi
;
3817 arg0
= TREE_VALUE (arglist
);
3818 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
3819 tmode
= insn_data
[icode
].operand
[0].mode
;
3820 mode0
= insn_data
[icode
].operand
[1].mode
;
3823 || GET_MODE (target
) != tmode
3824 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
3825 target
= gen_reg_rtx (tmode
);
3827 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
3828 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
3830 pat
= GEN_FCN (icode
) (target
, op0
);
3836 case ALTIVEC_BUILTIN_LD_INTERNAL_4si
:
3837 icode
= CODE_FOR_altivec_lvx_4si
;
3838 arg0
= TREE_VALUE (arglist
);
3839 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
3840 tmode
= insn_data
[icode
].operand
[0].mode
;
3841 mode0
= insn_data
[icode
].operand
[1].mode
;
3844 || GET_MODE (target
) != tmode
3845 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
3846 target
= gen_reg_rtx (tmode
);
3848 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
3849 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
3851 pat
= GEN_FCN (icode
) (target
, op0
);
3857 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf
:
3858 icode
= CODE_FOR_altivec_lvx_4sf
;
3859 arg0
= TREE_VALUE (arglist
);
3860 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
3861 tmode
= insn_data
[icode
].operand
[0].mode
;
3862 mode0
= insn_data
[icode
].operand
[1].mode
;
3865 || GET_MODE (target
) != tmode
3866 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
3867 target
= gen_reg_rtx (tmode
);
3869 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
3870 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
3872 pat
= GEN_FCN (icode
) (target
, op0
);
3878 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi
:
3879 icode
= CODE_FOR_altivec_stvx_16qi
;
3880 arg0
= TREE_VALUE (arglist
);
3881 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
3882 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
3883 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
3884 mode0
= insn_data
[icode
].operand
[0].mode
;
3885 mode1
= insn_data
[icode
].operand
[1].mode
;
3887 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
3888 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
3889 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
3890 op1
= copy_to_mode_reg (mode1
, op1
);
3892 pat
= GEN_FCN (icode
) (op0
, op1
);
3897 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi
:
3898 icode
= CODE_FOR_altivec_stvx_8hi
;
3899 arg0
= TREE_VALUE (arglist
);
3900 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
3901 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
3902 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
3903 mode0
= insn_data
[icode
].operand
[0].mode
;
3904 mode1
= insn_data
[icode
].operand
[1].mode
;
3906 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
3907 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
3908 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
3909 op1
= copy_to_mode_reg (mode1
, op1
);
3911 pat
= GEN_FCN (icode
) (op0
, op1
);
3916 case ALTIVEC_BUILTIN_ST_INTERNAL_4si
:
3917 icode
= CODE_FOR_altivec_stvx_4si
;
3918 arg0
= TREE_VALUE (arglist
);
3919 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
3920 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
3921 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
3922 mode0
= insn_data
[icode
].operand
[0].mode
;
3923 mode1
= insn_data
[icode
].operand
[1].mode
;
3925 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
3926 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
3927 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
3928 op1
= copy_to_mode_reg (mode1
, op1
);
3930 pat
= GEN_FCN (icode
) (op0
, op1
);
3935 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf
:
3936 icode
= CODE_FOR_altivec_stvx_4sf
;
3937 arg0
= TREE_VALUE (arglist
);
3938 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
3939 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
3940 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
3941 mode0
= insn_data
[icode
].operand
[0].mode
;
3942 mode1
= insn_data
[icode
].operand
[1].mode
;
3944 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
3945 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
3946 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
3947 op1
= copy_to_mode_reg (mode1
, op1
);
3949 pat
= GEN_FCN (icode
) (op0
, op1
);
3954 case ALTIVEC_BUILTIN_STVX
:
3955 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx
, arglist
);
3956 case ALTIVEC_BUILTIN_STVEBX
:
3957 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx
, arglist
);
3958 case ALTIVEC_BUILTIN_STVEHX
:
3959 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx
, arglist
);
3960 case ALTIVEC_BUILTIN_STVEWX
:
3961 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx
, arglist
);
3962 case ALTIVEC_BUILTIN_STVXL
:
3963 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl
, arglist
);
3965 case ALTIVEC_BUILTIN_MFVSCR
:
3966 icode
= CODE_FOR_altivec_mfvscr
;
3967 tmode
= insn_data
[icode
].operand
[0].mode
;
3970 || GET_MODE (target
) != tmode
3971 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
3972 target
= gen_reg_rtx (tmode
);
3974 pat
= GEN_FCN (icode
) (target
);
3980 case ALTIVEC_BUILTIN_MTVSCR
:
3981 icode
= CODE_FOR_altivec_mtvscr
;
3982 arg0
= TREE_VALUE (arglist
);
3983 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
3984 mode0
= insn_data
[icode
].operand
[0].mode
;
3986 /* If we got invalid arguments bail out before generating bad rtl. */
3987 if (arg0
== error_mark_node
)
3990 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
3991 op0
= copy_to_mode_reg (mode0
, op0
);
3993 pat
= GEN_FCN (icode
) (op0
);
3998 case ALTIVEC_BUILTIN_DSSALL
:
3999 emit_insn (gen_altivec_dssall ());
4002 case ALTIVEC_BUILTIN_DSS
:
4003 icode
= CODE_FOR_altivec_dss
;
4004 arg0
= TREE_VALUE (arglist
);
4005 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4006 mode0
= insn_data
[icode
].operand
[0].mode
;
4008 /* If we got invalid arguments bail out before generating bad rtl. */
4009 if (arg0
== error_mark_node
)
4012 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
4013 op0
= copy_to_mode_reg (mode0
, op0
);
4015 emit_insn (gen_altivec_dss (op0
));
4019 /* Handle DST variants. */
4020 d
= (struct builtin_description
*) bdesc_dst
;
4021 for (i
= 0; i
< sizeof (bdesc_dst
) / sizeof *d
; i
++, d
++)
4022 if (d
->code
== fcode
)
4024 arg0
= TREE_VALUE (arglist
);
4025 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4026 arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4027 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4028 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4029 op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4030 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
4031 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
4032 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
4034 /* Invalid arguments, bail out before generating bad rtl. */
4035 if (arg0
== error_mark_node
4036 || arg1
== error_mark_node
4037 || arg2
== error_mark_node
)
4040 if (! (*insn_data
[d
->icode
].operand
[0].predicate
) (op0
, mode0
))
4041 op0
= copy_to_mode_reg (mode0
, op0
);
4042 if (! (*insn_data
[d
->icode
].operand
[1].predicate
) (op1
, mode1
))
4043 op1
= copy_to_mode_reg (mode1
, op1
);
4045 if (GET_CODE (op2
) != CONST_INT
|| INTVAL (op2
) > 3)
4047 error ("argument 3 of `%s' must be a 2-bit literal", d
->name
);
4051 pat
= GEN_FCN (d
->icode
) (op0
, op1
, op2
);
4058 /* Expand abs* operations. */
4059 d
= (struct builtin_description
*) bdesc_abs
;
4060 for (i
= 0; i
< sizeof (bdesc_abs
) / sizeof *d
; i
++, d
++)
4061 if (d
->code
== fcode
)
4062 return altivec_expand_abs_builtin (d
->icode
, arglist
, target
);
4064 /* Handle simple unary operations. */
4065 d
= (struct builtin_description
*) bdesc_1arg
;
4066 for (i
= 0; i
< sizeof (bdesc_1arg
) / sizeof *d
; i
++, d
++)
4067 if (d
->code
== fcode
)
4068 return altivec_expand_unop_builtin (d
->icode
, arglist
, target
);
4070 /* Handle simple binary operations. */
4071 d
= (struct builtin_description
*) bdesc_2arg
;
4072 for (i
= 0; i
< sizeof (bdesc_2arg
) / sizeof *d
; i
++, d
++)
4073 if (d
->code
== fcode
)
4074 return altivec_expand_binop_builtin (d
->icode
, arglist
, target
);
4076 /* Expand the AltiVec predicates. */
4077 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
4078 for (i
= 0; i
< sizeof (bdesc_altivec_preds
) / sizeof *dp
; i
++, dp
++)
4079 if (dp
->code
== fcode
)
4080 return altivec_expand_predicate_builtin (dp
->icode
, dp
->opcode
, arglist
, target
);
4082 /* LV* are funky. We initialized them differently. */
4085 case ALTIVEC_BUILTIN_LVSL
:
4086 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsl
,
4088 case ALTIVEC_BUILTIN_LVSR
:
4089 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsr
,
4091 case ALTIVEC_BUILTIN_LVEBX
:
4092 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvebx
,
4094 case ALTIVEC_BUILTIN_LVEHX
:
4095 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvehx
,
4097 case ALTIVEC_BUILTIN_LVEWX
:
4098 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvewx
,
4100 case ALTIVEC_BUILTIN_LVXL
:
4101 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvxl
,
4103 case ALTIVEC_BUILTIN_LVX
:
4104 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvx
,
4111 /* Handle simple ternary operations. */
4112 d
= (struct builtin_description
*) bdesc_3arg
;
4113 for (i
= 0; i
< sizeof (bdesc_3arg
) / sizeof *d
; i
++, d
++)
4114 if (d
->code
== fcode
)
4115 return altivec_expand_ternop_builtin (d
->icode
, arglist
, target
);
4121 /* Expand an expression EXP that calls a built-in function,
4122 with result going to TARGET if that's convenient
4123 (and in mode MODE if that's convenient).
4124 SUBTARGET may be used as the target for computing one of EXP's operands.
4125 IGNORE is nonzero if the value is to be ignored. */
4128 rs6000_expand_builtin (exp
, target
, subtarget
, mode
, ignore
)
4131 rtx subtarget ATTRIBUTE_UNUSED
;
4132 enum machine_mode mode ATTRIBUTE_UNUSED
;
4133 int ignore ATTRIBUTE_UNUSED
;
4136 return altivec_expand_builtin (exp
, target
);
4142 rs6000_init_builtins ()
4145 altivec_init_builtins ();
4149 altivec_init_builtins (void)
4151 struct builtin_description
*d
;
4152 struct builtin_description_predicates
*dp
;
4155 tree endlink
= void_list_node
;
4157 tree pint_type_node
= build_pointer_type (integer_type_node
);
4158 tree pvoid_type_node
= build_pointer_type (void_type_node
);
4159 tree pshort_type_node
= build_pointer_type (short_integer_type_node
);
4160 tree pchar_type_node
= build_pointer_type (char_type_node
);
4161 tree pfloat_type_node
= build_pointer_type (float_type_node
);
4163 tree v4sf_ftype_v4sf_v4sf_v16qi
4164 = build_function_type (V4SF_type_node
,
4165 tree_cons (NULL_TREE
, V4SF_type_node
,
4166 tree_cons (NULL_TREE
, V4SF_type_node
,
4167 tree_cons (NULL_TREE
,
4170 tree v4si_ftype_v4si_v4si_v16qi
4171 = build_function_type (V4SI_type_node
,
4172 tree_cons (NULL_TREE
, V4SI_type_node
,
4173 tree_cons (NULL_TREE
, V4SI_type_node
,
4174 tree_cons (NULL_TREE
,
4177 tree v8hi_ftype_v8hi_v8hi_v16qi
4178 = build_function_type (V8HI_type_node
,
4179 tree_cons (NULL_TREE
, V8HI_type_node
,
4180 tree_cons (NULL_TREE
, V8HI_type_node
,
4181 tree_cons (NULL_TREE
,
4184 tree v16qi_ftype_v16qi_v16qi_v16qi
4185 = build_function_type (V16QI_type_node
,
4186 tree_cons (NULL_TREE
, V16QI_type_node
,
4187 tree_cons (NULL_TREE
, V16QI_type_node
,
4188 tree_cons (NULL_TREE
,
4192 /* V4SI foo (char). */
4193 tree v4si_ftype_char
4194 = build_function_type (V4SI_type_node
,
4195 tree_cons (NULL_TREE
, char_type_node
, endlink
));
4197 /* V8HI foo (char). */
4198 tree v8hi_ftype_char
4199 = build_function_type (V8HI_type_node
,
4200 tree_cons (NULL_TREE
, char_type_node
, endlink
));
4202 /* V16QI foo (char). */
4203 tree v16qi_ftype_char
4204 = build_function_type (V16QI_type_node
,
4205 tree_cons (NULL_TREE
, char_type_node
, endlink
));
4206 /* V4SF foo (V4SF). */
4207 tree v4sf_ftype_v4sf
4208 = build_function_type (V4SF_type_node
,
4209 tree_cons (NULL_TREE
, V4SF_type_node
, endlink
));
4211 /* V4SI foo (int *). */
4212 tree v4si_ftype_pint
4213 = build_function_type (V4SI_type_node
,
4214 tree_cons (NULL_TREE
, pint_type_node
, endlink
));
4215 /* V8HI foo (short *). */
4216 tree v8hi_ftype_pshort
4217 = build_function_type (V8HI_type_node
,
4218 tree_cons (NULL_TREE
, pshort_type_node
, endlink
));
4219 /* V16QI foo (char *). */
4220 tree v16qi_ftype_pchar
4221 = build_function_type (V16QI_type_node
,
4222 tree_cons (NULL_TREE
, pchar_type_node
, endlink
));
4223 /* V4SF foo (float *). */
4224 tree v4sf_ftype_pfloat
4225 = build_function_type (V4SF_type_node
,
4226 tree_cons (NULL_TREE
, pfloat_type_node
, endlink
));
4228 /* V8HI foo (V16QI). */
4229 tree v8hi_ftype_v16qi
4230 = build_function_type (V8HI_type_node
,
4231 tree_cons (NULL_TREE
, V16QI_type_node
, endlink
));
4233 /* void foo (void *, int, char/literal). */
4234 tree void_ftype_pvoid_int_char
4235 = build_function_type (void_type_node
,
4236 tree_cons (NULL_TREE
, pvoid_type_node
,
4237 tree_cons (NULL_TREE
, integer_type_node
,
4238 tree_cons (NULL_TREE
,
4242 /* void foo (int *, V4SI). */
4243 tree void_ftype_pint_v4si
4244 = build_function_type (void_type_node
,
4245 tree_cons (NULL_TREE
, pint_type_node
,
4246 tree_cons (NULL_TREE
, V4SI_type_node
,
4248 /* void foo (short *, V8HI). */
4249 tree void_ftype_pshort_v8hi
4250 = build_function_type (void_type_node
,
4251 tree_cons (NULL_TREE
, pshort_type_node
,
4252 tree_cons (NULL_TREE
, V8HI_type_node
,
4254 /* void foo (char *, V16QI). */
4255 tree void_ftype_pchar_v16qi
4256 = build_function_type (void_type_node
,
4257 tree_cons (NULL_TREE
, pchar_type_node
,
4258 tree_cons (NULL_TREE
, V16QI_type_node
,
4260 /* void foo (float *, V4SF). */
4261 tree void_ftype_pfloat_v4sf
4262 = build_function_type (void_type_node
,
4263 tree_cons (NULL_TREE
, pfloat_type_node
,
4264 tree_cons (NULL_TREE
, V4SF_type_node
,
4267 /* void foo (V4SI). */
4268 tree void_ftype_v4si
4269 = build_function_type (void_type_node
,
4270 tree_cons (NULL_TREE
, V4SI_type_node
,
4273 /* void foo (vint, int, void *). */
4274 tree void_ftype_v4si_int_pvoid
4275 = build_function_type (void_type_node
,
4276 tree_cons (NULL_TREE
, V4SI_type_node
,
4277 tree_cons (NULL_TREE
, integer_type_node
,
4278 tree_cons (NULL_TREE
,
4282 /* void foo (vchar, int, void *). */
4283 tree void_ftype_v16qi_int_pvoid
4284 = build_function_type (void_type_node
,
4285 tree_cons (NULL_TREE
, V16QI_type_node
,
4286 tree_cons (NULL_TREE
, integer_type_node
,
4287 tree_cons (NULL_TREE
,
4291 /* void foo (vshort, int, void *). */
4292 tree void_ftype_v8hi_int_pvoid
4293 = build_function_type (void_type_node
,
4294 tree_cons (NULL_TREE
, V8HI_type_node
,
4295 tree_cons (NULL_TREE
, integer_type_node
,
4296 tree_cons (NULL_TREE
,
4300 /* void foo (char). */
4302 = build_function_type (void_type_node
,
4303 tree_cons (NULL_TREE
, char_type_node
,
4306 /* void foo (void). */
4307 tree void_ftype_void
4308 = build_function_type (void_type_node
, void_list_node
);
4310 /* vshort foo (void). */
4311 tree v8hi_ftype_void
4312 = build_function_type (V8HI_type_node
, void_list_node
);
4314 tree v4si_ftype_v4si_v4si
4315 = build_function_type (V4SI_type_node
,
4316 tree_cons (NULL_TREE
, V4SI_type_node
,
4317 tree_cons (NULL_TREE
, V4SI_type_node
,
4320 /* These are for the unsigned 5 bit literals. */
4322 tree v4sf_ftype_v4si_char
4323 = build_function_type (V4SF_type_node
,
4324 tree_cons (NULL_TREE
, V4SI_type_node
,
4325 tree_cons (NULL_TREE
, char_type_node
,
4327 tree v4si_ftype_v4sf_char
4328 = build_function_type (V4SI_type_node
,
4329 tree_cons (NULL_TREE
, V4SF_type_node
,
4330 tree_cons (NULL_TREE
, char_type_node
,
4332 tree v4si_ftype_v4si_char
4333 = build_function_type (V4SI_type_node
,
4334 tree_cons (NULL_TREE
, V4SI_type_node
,
4335 tree_cons (NULL_TREE
, char_type_node
,
4337 tree v8hi_ftype_v8hi_char
4338 = build_function_type (V8HI_type_node
,
4339 tree_cons (NULL_TREE
, V8HI_type_node
,
4340 tree_cons (NULL_TREE
, char_type_node
,
4342 tree v16qi_ftype_v16qi_char
4343 = build_function_type (V16QI_type_node
,
4344 tree_cons (NULL_TREE
, V16QI_type_node
,
4345 tree_cons (NULL_TREE
, char_type_node
,
4348 /* These are for the unsigned 4 bit literals. */
4350 tree v16qi_ftype_v16qi_v16qi_char
4351 = build_function_type (V16QI_type_node
,
4352 tree_cons (NULL_TREE
, V16QI_type_node
,
4353 tree_cons (NULL_TREE
, V16QI_type_node
,
4354 tree_cons (NULL_TREE
,
4358 tree v8hi_ftype_v8hi_v8hi_char
4359 = build_function_type (V8HI_type_node
,
4360 tree_cons (NULL_TREE
, V8HI_type_node
,
4361 tree_cons (NULL_TREE
, V8HI_type_node
,
4362 tree_cons (NULL_TREE
,
4366 tree v4si_ftype_v4si_v4si_char
4367 = build_function_type (V4SI_type_node
,
4368 tree_cons (NULL_TREE
, V4SI_type_node
,
4369 tree_cons (NULL_TREE
, V4SI_type_node
,
4370 tree_cons (NULL_TREE
,
4374 tree v4sf_ftype_v4sf_v4sf_char
4375 = build_function_type (V4SF_type_node
,
4376 tree_cons (NULL_TREE
, V4SF_type_node
,
4377 tree_cons (NULL_TREE
, V4SF_type_node
,
4378 tree_cons (NULL_TREE
,
4382 /* End of 4 bit literals. */
4384 tree v4sf_ftype_v4sf_v4sf
4385 = build_function_type (V4SF_type_node
,
4386 tree_cons (NULL_TREE
, V4SF_type_node
,
4387 tree_cons (NULL_TREE
, V4SF_type_node
,
4389 tree v4sf_ftype_v4sf_v4sf_v4si
4390 = build_function_type (V4SF_type_node
,
4391 tree_cons (NULL_TREE
, V4SF_type_node
,
4392 tree_cons (NULL_TREE
, V4SF_type_node
,
4393 tree_cons (NULL_TREE
,
4396 tree v4sf_ftype_v4sf_v4sf_v4sf
4397 = build_function_type (V4SF_type_node
,
4398 tree_cons (NULL_TREE
, V4SF_type_node
,
4399 tree_cons (NULL_TREE
, V4SF_type_node
,
4400 tree_cons (NULL_TREE
,
4403 tree v4si_ftype_v4si_v4si_v4si
4404 = build_function_type (V4SI_type_node
,
4405 tree_cons (NULL_TREE
, V4SI_type_node
,
4406 tree_cons (NULL_TREE
, V4SI_type_node
,
4407 tree_cons (NULL_TREE
,
4411 tree v8hi_ftype_v8hi_v8hi
4412 = build_function_type (V8HI_type_node
,
4413 tree_cons (NULL_TREE
, V8HI_type_node
,
4414 tree_cons (NULL_TREE
, V8HI_type_node
,
4416 tree v8hi_ftype_v8hi_v8hi_v8hi
4417 = build_function_type (V8HI_type_node
,
4418 tree_cons (NULL_TREE
, V8HI_type_node
,
4419 tree_cons (NULL_TREE
, V8HI_type_node
,
4420 tree_cons (NULL_TREE
,
4423 tree v4si_ftype_v8hi_v8hi_v4si
4424 = build_function_type (V4SI_type_node
,
4425 tree_cons (NULL_TREE
, V8HI_type_node
,
4426 tree_cons (NULL_TREE
, V8HI_type_node
,
4427 tree_cons (NULL_TREE
,
4430 tree v4si_ftype_v16qi_v16qi_v4si
4431 = build_function_type (V4SI_type_node
,
4432 tree_cons (NULL_TREE
, V16QI_type_node
,
4433 tree_cons (NULL_TREE
, V16QI_type_node
,
4434 tree_cons (NULL_TREE
,
4438 tree v16qi_ftype_v16qi_v16qi
4439 = build_function_type (V16QI_type_node
,
4440 tree_cons (NULL_TREE
, V16QI_type_node
,
4441 tree_cons (NULL_TREE
, V16QI_type_node
,
4444 tree v4si_ftype_v4sf_v4sf
4445 = build_function_type (V4SI_type_node
,
4446 tree_cons (NULL_TREE
, V4SF_type_node
,
4447 tree_cons (NULL_TREE
, V4SF_type_node
,
4450 tree v4si_ftype_v4si
4451 = build_function_type (V4SI_type_node
,
4452 tree_cons (NULL_TREE
, V4SI_type_node
, endlink
));
4454 tree v8hi_ftype_v8hi
4455 = build_function_type (V8HI_type_node
,
4456 tree_cons (NULL_TREE
, V8HI_type_node
, endlink
));
4458 tree v16qi_ftype_v16qi
4459 = build_function_type (V16QI_type_node
,
4460 tree_cons (NULL_TREE
, V16QI_type_node
, endlink
));
4462 tree v8hi_ftype_v16qi_v16qi
4463 = build_function_type (V8HI_type_node
,
4464 tree_cons (NULL_TREE
, V16QI_type_node
,
4465 tree_cons (NULL_TREE
, V16QI_type_node
,
4468 tree v4si_ftype_v8hi_v8hi
4469 = build_function_type (V4SI_type_node
,
4470 tree_cons (NULL_TREE
, V8HI_type_node
,
4471 tree_cons (NULL_TREE
, V8HI_type_node
,
4474 tree v8hi_ftype_v4si_v4si
4475 = build_function_type (V8HI_type_node
,
4476 tree_cons (NULL_TREE
, V4SI_type_node
,
4477 tree_cons (NULL_TREE
, V4SI_type_node
,
4480 tree v16qi_ftype_v8hi_v8hi
4481 = build_function_type (V16QI_type_node
,
4482 tree_cons (NULL_TREE
, V8HI_type_node
,
4483 tree_cons (NULL_TREE
, V8HI_type_node
,
4486 tree v4si_ftype_v16qi_v4si
4487 = build_function_type (V4SI_type_node
,
4488 tree_cons (NULL_TREE
, V16QI_type_node
,
4489 tree_cons (NULL_TREE
, V4SI_type_node
,
4492 tree v4si_ftype_v16qi_v16qi
4493 = build_function_type (V4SI_type_node
,
4494 tree_cons (NULL_TREE
, V16QI_type_node
,
4495 tree_cons (NULL_TREE
, V16QI_type_node
,
4498 tree v4si_ftype_v8hi_v4si
4499 = build_function_type (V4SI_type_node
,
4500 tree_cons (NULL_TREE
, V8HI_type_node
,
4501 tree_cons (NULL_TREE
, V4SI_type_node
,
4504 tree v4si_ftype_v8hi
4505 = build_function_type (V4SI_type_node
,
4506 tree_cons (NULL_TREE
, V8HI_type_node
, endlink
));
4508 tree int_ftype_v4si_v4si
4509 = build_function_type (integer_type_node
,
4510 tree_cons (NULL_TREE
, V4SI_type_node
,
4511 tree_cons (NULL_TREE
, V4SI_type_node
,
4514 tree int_ftype_v4sf_v4sf
4515 = build_function_type (integer_type_node
,
4516 tree_cons (NULL_TREE
, V4SF_type_node
,
4517 tree_cons (NULL_TREE
, V4SF_type_node
,
4520 tree int_ftype_v16qi_v16qi
4521 = build_function_type (integer_type_node
,
4522 tree_cons (NULL_TREE
, V16QI_type_node
,
4523 tree_cons (NULL_TREE
, V16QI_type_node
,
4526 tree int_ftype_int_v4si_v4si
4527 = build_function_type
4529 tree_cons (NULL_TREE
, integer_type_node
,
4530 tree_cons (NULL_TREE
, V4SI_type_node
,
4531 tree_cons (NULL_TREE
, V4SI_type_node
,
4534 tree int_ftype_int_v4sf_v4sf
4535 = build_function_type
4537 tree_cons (NULL_TREE
, integer_type_node
,
4538 tree_cons (NULL_TREE
, V4SF_type_node
,
4539 tree_cons (NULL_TREE
, V4SF_type_node
,
4542 tree int_ftype_int_v8hi_v8hi
4543 = build_function_type
4545 tree_cons (NULL_TREE
, integer_type_node
,
4546 tree_cons (NULL_TREE
, V8HI_type_node
,
4547 tree_cons (NULL_TREE
, V8HI_type_node
,
4550 tree int_ftype_int_v16qi_v16qi
4551 = build_function_type
4553 tree_cons (NULL_TREE
, integer_type_node
,
4554 tree_cons (NULL_TREE
, V16QI_type_node
,
4555 tree_cons (NULL_TREE
, V16QI_type_node
,
4558 tree v16qi_ftype_int_pvoid
4559 = build_function_type (V16QI_type_node
,
4560 tree_cons (NULL_TREE
, integer_type_node
,
4561 tree_cons (NULL_TREE
, pvoid_type_node
,
4564 tree v4si_ftype_int_pvoid
4565 = build_function_type (V4SI_type_node
,
4566 tree_cons (NULL_TREE
, integer_type_node
,
4567 tree_cons (NULL_TREE
, pvoid_type_node
,
4570 tree v8hi_ftype_int_pvoid
4571 = build_function_type (V8HI_type_node
,
4572 tree_cons (NULL_TREE
, integer_type_node
,
4573 tree_cons (NULL_TREE
, pvoid_type_node
,
4576 tree int_ftype_v8hi_v8hi
4577 = build_function_type (integer_type_node
,
4578 tree_cons (NULL_TREE
, V8HI_type_node
,
4579 tree_cons (NULL_TREE
, V8HI_type_node
,
4582 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat
, ALTIVEC_BUILTIN_LD_INTERNAL_4sf
);
4583 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf
, ALTIVEC_BUILTIN_ST_INTERNAL_4sf
);
4584 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint
, ALTIVEC_BUILTIN_LD_INTERNAL_4si
);
4585 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si
, ALTIVEC_BUILTIN_ST_INTERNAL_4si
);
4586 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort
, ALTIVEC_BUILTIN_LD_INTERNAL_8hi
);
4587 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi
, ALTIVEC_BUILTIN_ST_INTERNAL_8hi
);
4588 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar
, ALTIVEC_BUILTIN_LD_INTERNAL_16qi
);
4589 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi
, ALTIVEC_BUILTIN_ST_INTERNAL_16qi
);
4590 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mtvscr", void_ftype_v4si
, ALTIVEC_BUILTIN_MTVSCR
);
4591 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mfvscr", v8hi_ftype_void
, ALTIVEC_BUILTIN_MFVSCR
);
4592 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dssall", void_ftype_void
, ALTIVEC_BUILTIN_DSSALL
);
4593 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dss", void_ftype_qi
, ALTIVEC_BUILTIN_DSS
);
4594 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVSL
);
4595 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVSR
);
4596 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVEBX
);
4597 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVEHX
);
4598 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVEWX
);
4599 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVXL
);
4600 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvx", v4si_ftype_int_pvoid
, ALTIVEC_BUILTIN_LVX
);
4601 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVX
);
4602 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid
, ALTIVEC_BUILTIN_STVEBX
);
4603 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid
, ALTIVEC_BUILTIN_STVEHX
);
4604 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVEWX
);
4605 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid
, ALTIVEC_BUILTIN_STVXL
);
4607 /* Add the simple ternary operators. */
4608 d
= (struct builtin_description
*) bdesc_3arg
;
4609 for (i
= 0; i
< sizeof (bdesc_3arg
) / sizeof *d
; i
++, d
++)
4612 enum machine_mode mode0
, mode1
, mode2
, mode3
;
4618 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
4619 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
4620 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
4621 mode3
= insn_data
[d
->icode
].operand
[3].mode
;
4623 /* When all four are of the same mode. */
4624 if (mode0
== mode1
&& mode1
== mode2
&& mode2
== mode3
)
4629 type
= v4si_ftype_v4si_v4si_v4si
;
4632 type
= v4sf_ftype_v4sf_v4sf_v4sf
;
4635 type
= v8hi_ftype_v8hi_v8hi_v8hi
;
4638 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
4644 else if (mode0
== mode1
&& mode1
== mode2
&& mode3
== V16QImode
)
4649 type
= v4si_ftype_v4si_v4si_v16qi
;
4652 type
= v4sf_ftype_v4sf_v4sf_v16qi
;
4655 type
= v8hi_ftype_v8hi_v8hi_v16qi
;
4658 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
4664 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
4665 && mode3
== V4SImode
)
4666 type
= v4si_ftype_v16qi_v16qi_v4si
;
4667 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
4668 && mode3
== V4SImode
)
4669 type
= v4si_ftype_v8hi_v8hi_v4si
;
4670 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
4671 && mode3
== V4SImode
)
4672 type
= v4sf_ftype_v4sf_v4sf_v4si
;
4674 /* vchar, vchar, vchar, 4 bit literal. */
4675 else if (mode0
== V16QImode
&& mode1
== mode0
&& mode2
== mode0
4677 type
= v16qi_ftype_v16qi_v16qi_char
;
4679 /* vshort, vshort, vshort, 4 bit literal. */
4680 else if (mode0
== V8HImode
&& mode1
== mode0
&& mode2
== mode0
4682 type
= v8hi_ftype_v8hi_v8hi_char
;
4684 /* vint, vint, vint, 4 bit literal. */
4685 else if (mode0
== V4SImode
&& mode1
== mode0
&& mode2
== mode0
4687 type
= v4si_ftype_v4si_v4si_char
;
4689 /* vfloat, vfloat, vfloat, 4 bit literal. */
4690 else if (mode0
== V4SFmode
&& mode1
== mode0
&& mode2
== mode0
4692 type
= v4sf_ftype_v4sf_v4sf_char
;
4697 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
4700 /* Add the DST variants. */
4701 d
= (struct builtin_description
*) bdesc_dst
;
4702 for (i
= 0; i
< sizeof (bdesc_dst
) / sizeof *d
; i
++, d
++)
4703 def_builtin (d
->mask
, d
->name
, void_ftype_pvoid_int_char
, d
->code
);
4705 /* Initialize the predicates. */
4706 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
4707 for (i
= 0; i
< sizeof (bdesc_altivec_preds
) / sizeof *dp
; i
++, dp
++)
4709 enum machine_mode mode1
;
4712 mode1
= insn_data
[dp
->icode
].operand
[1].mode
;
4717 type
= int_ftype_int_v4si_v4si
;
4720 type
= int_ftype_int_v8hi_v8hi
;
4723 type
= int_ftype_int_v16qi_v16qi
;
4726 type
= int_ftype_int_v4sf_v4sf
;
4732 def_builtin (dp
->mask
, dp
->name
, type
, dp
->code
);
4735 /* Add the simple binary operators. */
4736 d
= (struct builtin_description
*) bdesc_2arg
;
4737 for (i
= 0; i
< sizeof (bdesc_2arg
) / sizeof *d
; i
++, d
++)
4739 enum machine_mode mode0
, mode1
, mode2
;
4745 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
4746 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
4747 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
4749 /* When all three operands are of the same mode. */
4750 if (mode0
== mode1
&& mode1
== mode2
)
4755 type
= v4sf_ftype_v4sf_v4sf
;
4758 type
= v4si_ftype_v4si_v4si
;
4761 type
= v16qi_ftype_v16qi_v16qi
;
4764 type
= v8hi_ftype_v8hi_v8hi
;
4771 /* A few other combos we really don't want to do manually. */
4773 /* vint, vfloat, vfloat. */
4774 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
)
4775 type
= v4si_ftype_v4sf_v4sf
;
4777 /* vshort, vchar, vchar. */
4778 else if (mode0
== V8HImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
4779 type
= v8hi_ftype_v16qi_v16qi
;
4781 /* vint, vshort, vshort. */
4782 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
4783 type
= v4si_ftype_v8hi_v8hi
;
4785 /* vshort, vint, vint. */
4786 else if (mode0
== V8HImode
&& mode1
== V4SImode
&& mode2
== V4SImode
)
4787 type
= v8hi_ftype_v4si_v4si
;
4789 /* vchar, vshort, vshort. */
4790 else if (mode0
== V16QImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
4791 type
= v16qi_ftype_v8hi_v8hi
;
4793 /* vint, vchar, vint. */
4794 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V4SImode
)
4795 type
= v4si_ftype_v16qi_v4si
;
4797 /* vint, vchar, vchar. */
4798 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
4799 type
= v4si_ftype_v16qi_v16qi
;
4801 /* vint, vshort, vint. */
4802 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V4SImode
)
4803 type
= v4si_ftype_v8hi_v4si
;
4805 /* vint, vint, 5 bit literal. */
4806 else if (mode0
== V4SImode
&& mode1
== V4SImode
&& mode2
== QImode
)
4807 type
= v4si_ftype_v4si_char
;
4809 /* vshort, vshort, 5 bit literal. */
4810 else if (mode0
== V8HImode
&& mode1
== V8HImode
&& mode2
== QImode
)
4811 type
= v8hi_ftype_v8hi_char
;
4813 /* vchar, vchar, 5 bit literal. */
4814 else if (mode0
== V16QImode
&& mode1
== V16QImode
&& mode2
== QImode
)
4815 type
= v16qi_ftype_v16qi_char
;
4817 /* vfloat, vint, 5 bit literal. */
4818 else if (mode0
== V4SFmode
&& mode1
== V4SImode
&& mode2
== QImode
)
4819 type
= v4sf_ftype_v4si_char
;
4821 /* vint, vfloat, 5 bit literal. */
4822 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== QImode
)
4823 type
= v4si_ftype_v4sf_char
;
4826 else if (mode0
== SImode
)
4831 type
= int_ftype_v4si_v4si
;
4834 type
= int_ftype_v4sf_v4sf
;
4837 type
= int_ftype_v16qi_v16qi
;
4840 type
= int_ftype_v8hi_v8hi
;
4850 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
4853 /* Initialize the abs* operators. */
4854 d
= (struct builtin_description
*) bdesc_abs
;
4855 for (i
= 0; i
< sizeof (bdesc_abs
) / sizeof *d
; i
++, d
++)
4857 enum machine_mode mode0
;
4860 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
4865 type
= v4si_ftype_v4si
;
4868 type
= v8hi_ftype_v8hi
;
4871 type
= v16qi_ftype_v16qi
;
4874 type
= v4sf_ftype_v4sf
;
4880 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
4883 /* Add the simple unary operators. */
4884 d
= (struct builtin_description
*) bdesc_1arg
;
4885 for (i
= 0; i
< sizeof (bdesc_1arg
) / sizeof *d
; i
++, d
++)
4887 enum machine_mode mode0
, mode1
;
4893 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
4894 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
4896 if (mode0
== V4SImode
&& mode1
== QImode
)
4897 type
= v4si_ftype_char
;
4898 else if (mode0
== V8HImode
&& mode1
== QImode
)
4899 type
= v8hi_ftype_char
;
4900 else if (mode0
== V16QImode
&& mode1
== QImode
)
4901 type
= v16qi_ftype_char
;
4902 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
)
4903 type
= v4sf_ftype_v4sf
;
4904 else if (mode0
== V8HImode
&& mode1
== V16QImode
)
4905 type
= v8hi_ftype_v16qi
;
4906 else if (mode0
== V4SImode
&& mode1
== V8HImode
)
4907 type
= v4si_ftype_v8hi
;
4911 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
4916 /* Generate a memory reference for expand_block_move, copying volatile,
4917 and other bits from an original memory reference. */
4920 expand_block_move_mem (mode
, addr
, orig_mem
)
4921 enum machine_mode mode
;
4925 rtx mem
= gen_rtx_MEM (mode
, addr
);
4927 MEM_COPY_ATTRIBUTES (mem
, orig_mem
);
4931 /* Expand a block move operation, and return 1 if successful. Return 0
4932 if we should let the compiler generate normal code.
4934 operands[0] is the destination
4935 operands[1] is the source
4936 operands[2] is the length
4937 operands[3] is the alignment */
4939 #define MAX_MOVE_REG 4
4942 expand_block_move (operands
)
4945 rtx orig_dest
= operands
[0];
4946 rtx orig_src
= operands
[1];
4947 rtx bytes_rtx
= operands
[2];
4948 rtx align_rtx
= operands
[3];
4949 int constp
= (GET_CODE (bytes_rtx
) == CONST_INT
);
4960 rtx stores
[MAX_MOVE_REG
];
4963 /* If this is not a fixed size move, just call memcpy */
4967 /* If this is not a fixed size alignment, abort */
4968 if (GET_CODE (align_rtx
) != CONST_INT
)
4970 align
= INTVAL (align_rtx
);
4972 /* Anything to move? */
4973 bytes
= INTVAL (bytes_rtx
);
4977 /* store_one_arg depends on expand_block_move to handle at least the size of
4978 reg_parm_stack_space. */
4979 if (bytes
> (TARGET_POWERPC64
? 64 : 32))
4982 /* Move the address into scratch registers. */
4983 dest_reg
= copy_addr_to_reg (XEXP (orig_dest
, 0));
4984 src_reg
= copy_addr_to_reg (XEXP (orig_src
, 0));
4986 if (TARGET_STRING
) /* string instructions are available */
4988 for ( ; bytes
> 0; bytes
-= move_bytes
)
4990 if (bytes
> 24 /* move up to 32 bytes at a time */
4998 && ! fixed_regs
[12])
5000 move_bytes
= (bytes
> 32) ? 32 : bytes
;
5001 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode
,
5004 expand_block_move_mem (BLKmode
,
5007 GEN_INT ((move_bytes
== 32)
5011 else if (bytes
> 16 /* move up to 24 bytes at a time */
5017 && ! fixed_regs
[10])
5019 move_bytes
= (bytes
> 24) ? 24 : bytes
;
5020 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode
,
5023 expand_block_move_mem (BLKmode
,
5026 GEN_INT (move_bytes
),
5029 else if (bytes
> 8 /* move up to 16 bytes at a time */
5035 move_bytes
= (bytes
> 16) ? 16 : bytes
;
5036 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode
,
5039 expand_block_move_mem (BLKmode
,
5042 GEN_INT (move_bytes
),
5045 else if (bytes
>= 8 && TARGET_POWERPC64
5046 /* 64-bit loads and stores require word-aligned
5048 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
5051 tmp_reg
= gen_reg_rtx (DImode
);
5052 emit_move_insn (tmp_reg
,
5053 expand_block_move_mem (DImode
,
5054 src_reg
, orig_src
));
5055 emit_move_insn (expand_block_move_mem (DImode
,
5056 dest_reg
, orig_dest
),
5059 else if (bytes
> 4 && !TARGET_POWERPC64
)
5060 { /* move up to 8 bytes at a time */
5061 move_bytes
= (bytes
> 8) ? 8 : bytes
;
5062 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode
,
5065 expand_block_move_mem (BLKmode
,
5068 GEN_INT (move_bytes
),
5071 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
5072 { /* move 4 bytes */
5074 tmp_reg
= gen_reg_rtx (SImode
);
5075 emit_move_insn (tmp_reg
,
5076 expand_block_move_mem (SImode
,
5077 src_reg
, orig_src
));
5078 emit_move_insn (expand_block_move_mem (SImode
,
5079 dest_reg
, orig_dest
),
5082 else if (bytes
== 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
5083 { /* move 2 bytes */
5085 tmp_reg
= gen_reg_rtx (HImode
);
5086 emit_move_insn (tmp_reg
,
5087 expand_block_move_mem (HImode
,
5088 src_reg
, orig_src
));
5089 emit_move_insn (expand_block_move_mem (HImode
,
5090 dest_reg
, orig_dest
),
5093 else if (bytes
== 1) /* move 1 byte */
5096 tmp_reg
= gen_reg_rtx (QImode
);
5097 emit_move_insn (tmp_reg
,
5098 expand_block_move_mem (QImode
,
5099 src_reg
, orig_src
));
5100 emit_move_insn (expand_block_move_mem (QImode
,
5101 dest_reg
, orig_dest
),
5105 { /* move up to 4 bytes at a time */
5106 move_bytes
= (bytes
> 4) ? 4 : bytes
;
5107 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode
,
5110 expand_block_move_mem (BLKmode
,
5113 GEN_INT (move_bytes
),
5117 if (bytes
> move_bytes
)
5119 if (! TARGET_POWERPC64
)
5121 emit_insn (gen_addsi3 (src_reg
, src_reg
,
5122 GEN_INT (move_bytes
)));
5123 emit_insn (gen_addsi3 (dest_reg
, dest_reg
,
5124 GEN_INT (move_bytes
)));
5128 emit_insn (gen_adddi3 (src_reg
, src_reg
,
5129 GEN_INT (move_bytes
)));
5130 emit_insn (gen_adddi3 (dest_reg
, dest_reg
,
5131 GEN_INT (move_bytes
)));
5137 else /* string instructions not available */
5139 num_reg
= offset
= 0;
5140 for ( ; bytes
> 0; (bytes
-= move_bytes
), (offset
+= move_bytes
))
5142 /* Calculate the correct offset for src/dest */
5146 dest_addr
= dest_reg
;
5150 src_addr
= plus_constant (src_reg
, offset
);
5151 dest_addr
= plus_constant (dest_reg
, offset
);
5154 /* Generate the appropriate load and store, saving the stores
5156 if (bytes
>= 8 && TARGET_POWERPC64
5157 /* 64-bit loads and stores require word-aligned
5159 && (align
>= 8 || (! STRICT_ALIGNMENT
&& align
>= 4)))
5162 tmp_reg
= gen_reg_rtx (DImode
);
5163 emit_insn (gen_movdi (tmp_reg
,
5164 expand_block_move_mem (DImode
,
5167 stores
[num_reg
++] = gen_movdi (expand_block_move_mem (DImode
,
5172 else if (bytes
>= 4 && (align
>= 4 || ! STRICT_ALIGNMENT
))
5175 tmp_reg
= gen_reg_rtx (SImode
);
5176 emit_insn (gen_movsi (tmp_reg
,
5177 expand_block_move_mem (SImode
,
5180 stores
[num_reg
++] = gen_movsi (expand_block_move_mem (SImode
,
5185 else if (bytes
>= 2 && (align
>= 2 || ! STRICT_ALIGNMENT
))
5188 tmp_reg
= gen_reg_rtx (HImode
);
5189 emit_insn (gen_movhi (tmp_reg
,
5190 expand_block_move_mem (HImode
,
5193 stores
[num_reg
++] = gen_movhi (expand_block_move_mem (HImode
,
5201 tmp_reg
= gen_reg_rtx (QImode
);
5202 emit_insn (gen_movqi (tmp_reg
,
5203 expand_block_move_mem (QImode
,
5206 stores
[num_reg
++] = gen_movqi (expand_block_move_mem (QImode
,
5212 if (num_reg
>= MAX_MOVE_REG
)
5214 for (i
= 0; i
< num_reg
; i
++)
5215 emit_insn (stores
[i
]);
5220 for (i
= 0; i
< num_reg
; i
++)
5221 emit_insn (stores
[i
]);
5228 /* Return 1 if OP is a load multiple operation. It is known to be a
5229 PARALLEL and the first section will be tested. */
5232 load_multiple_operation (op
, mode
)
5234 enum machine_mode mode ATTRIBUTE_UNUSED
;
5236 int count
= XVECLEN (op
, 0);
5237 unsigned int dest_regno
;
5241 /* Perform a quick check so we don't blow up below. */
5243 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
5244 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
5245 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
5248 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
5249 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
5251 for (i
= 1; i
< count
; i
++)
5253 rtx elt
= XVECEXP (op
, 0, i
);
5255 if (GET_CODE (elt
) != SET
5256 || GET_CODE (SET_DEST (elt
)) != REG
5257 || GET_MODE (SET_DEST (elt
)) != SImode
5258 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
5259 || GET_CODE (SET_SRC (elt
)) != MEM
5260 || GET_MODE (SET_SRC (elt
)) != SImode
5261 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
5262 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
5263 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
5264 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != i
* 4)
5271 /* Similar, but tests for store multiple. Here, the second vector element
5272 is a CLOBBER. It will be tested later. */
5275 store_multiple_operation (op
, mode
)
5277 enum machine_mode mode ATTRIBUTE_UNUSED
;
5279 int count
= XVECLEN (op
, 0) - 1;
5280 unsigned int src_regno
;
5284 /* Perform a quick check so we don't blow up below. */
5286 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
5287 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
5288 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
5291 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
5292 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
5294 for (i
= 1; i
< count
; i
++)
5296 rtx elt
= XVECEXP (op
, 0, i
+ 1);
5298 if (GET_CODE (elt
) != SET
5299 || GET_CODE (SET_SRC (elt
)) != REG
5300 || GET_MODE (SET_SRC (elt
)) != SImode
5301 || REGNO (SET_SRC (elt
)) != src_regno
+ i
5302 || GET_CODE (SET_DEST (elt
)) != MEM
5303 || GET_MODE (SET_DEST (elt
)) != SImode
5304 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
5305 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
5306 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
5307 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != i
* 4)
5314 /* Return 1 for a parallel vrsave operation. */
5317 vrsave_operation (op
, mode
)
5319 enum machine_mode mode ATTRIBUTE_UNUSED
;
5321 int count
= XVECLEN (op
, 0);
5322 unsigned int dest_regno
, src_regno
;
5326 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
5327 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
5328 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC_VOLATILE
)
5331 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
5332 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
5334 if (dest_regno
!= VRSAVE_REGNO
5335 && src_regno
!= VRSAVE_REGNO
)
5338 for (i
= 1; i
< count
; i
++)
5340 rtx elt
= XVECEXP (op
, 0, i
);
5342 if (GET_CODE (elt
) != CLOBBER
5343 && GET_CODE (elt
) != SET
)
5350 /* Return 1 for an PARALLEL suitable for mtcrf. */
5353 mtcrf_operation (op
, mode
)
5355 enum machine_mode mode ATTRIBUTE_UNUSED
;
5357 int count
= XVECLEN (op
, 0);
5361 /* Perform a quick check so we don't blow up below. */
5363 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
5364 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC
5365 || XVECLEN (SET_SRC (XVECEXP (op
, 0, 0)), 0) != 2)
5367 src_reg
= XVECEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0, 0);
5369 if (GET_CODE (src_reg
) != REG
5370 || GET_MODE (src_reg
) != SImode
5371 || ! INT_REGNO_P (REGNO (src_reg
)))
5374 for (i
= 0; i
< count
; i
++)
5376 rtx exp
= XVECEXP (op
, 0, i
);
5380 if (GET_CODE (exp
) != SET
5381 || GET_CODE (SET_DEST (exp
)) != REG
5382 || GET_MODE (SET_DEST (exp
)) != CCmode
5383 || ! CR_REGNO_P (REGNO (SET_DEST (exp
))))
5385 unspec
= SET_SRC (exp
);
5386 maskval
= 1 << (MAX_CR_REGNO
- REGNO (SET_DEST (exp
)));
5388 if (GET_CODE (unspec
) != UNSPEC
5389 || XINT (unspec
, 1) != 20
5390 || XVECLEN (unspec
, 0) != 2
5391 || XVECEXP (unspec
, 0, 0) != src_reg
5392 || GET_CODE (XVECEXP (unspec
, 0, 1)) != CONST_INT
5393 || INTVAL (XVECEXP (unspec
, 0, 1)) != maskval
)
5399 /* Return 1 for an PARALLEL suitable for lmw. */
5402 lmw_operation (op
, mode
)
5404 enum machine_mode mode ATTRIBUTE_UNUSED
;
5406 int count
= XVECLEN (op
, 0);
5407 unsigned int dest_regno
;
5409 unsigned int base_regno
;
5410 HOST_WIDE_INT offset
;
5413 /* Perform a quick check so we don't blow up below. */
5415 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
5416 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
5417 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
5420 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
5421 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
5424 || count
!= 32 - (int) dest_regno
)
5427 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr
, 0))
5430 base_regno
= REGNO (src_addr
);
5431 if (base_regno
== 0)
5434 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, src_addr
, 0))
5436 offset
= INTVAL (XEXP (src_addr
, 1));
5437 base_regno
= REGNO (XEXP (src_addr
, 0));
5442 for (i
= 0; i
< count
; i
++)
5444 rtx elt
= XVECEXP (op
, 0, i
);
5447 HOST_WIDE_INT newoffset
;
5449 if (GET_CODE (elt
) != SET
5450 || GET_CODE (SET_DEST (elt
)) != REG
5451 || GET_MODE (SET_DEST (elt
)) != SImode
5452 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
5453 || GET_CODE (SET_SRC (elt
)) != MEM
5454 || GET_MODE (SET_SRC (elt
)) != SImode
)
5456 newaddr
= XEXP (SET_SRC (elt
), 0);
5457 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr
, 0))
5462 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, newaddr
, 0))
5464 addr_reg
= XEXP (newaddr
, 0);
5465 newoffset
= INTVAL (XEXP (newaddr
, 1));
5469 if (REGNO (addr_reg
) != base_regno
5470 || newoffset
!= offset
+ 4 * i
)
5477 /* Return 1 for an PARALLEL suitable for stmw. */
5480 stmw_operation (op
, mode
)
5482 enum machine_mode mode ATTRIBUTE_UNUSED
;
5484 int count
= XVECLEN (op
, 0);
5485 unsigned int src_regno
;
5487 unsigned int base_regno
;
5488 HOST_WIDE_INT offset
;
5491 /* Perform a quick check so we don't blow up below. */
5493 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
5494 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
5495 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
5498 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
5499 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
5502 || count
!= 32 - (int) src_regno
)
5505 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr
, 0))
5508 base_regno
= REGNO (dest_addr
);
5509 if (base_regno
== 0)
5512 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, dest_addr
, 0))
5514 offset
= INTVAL (XEXP (dest_addr
, 1));
5515 base_regno
= REGNO (XEXP (dest_addr
, 0));
5520 for (i
= 0; i
< count
; i
++)
5522 rtx elt
= XVECEXP (op
, 0, i
);
5525 HOST_WIDE_INT newoffset
;
5527 if (GET_CODE (elt
) != SET
5528 || GET_CODE (SET_SRC (elt
)) != REG
5529 || GET_MODE (SET_SRC (elt
)) != SImode
5530 || REGNO (SET_SRC (elt
)) != src_regno
+ i
5531 || GET_CODE (SET_DEST (elt
)) != MEM
5532 || GET_MODE (SET_DEST (elt
)) != SImode
)
5534 newaddr
= XEXP (SET_DEST (elt
), 0);
5535 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr
, 0))
5540 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode
, newaddr
, 0))
5542 addr_reg
= XEXP (newaddr
, 0);
5543 newoffset
= INTVAL (XEXP (newaddr
, 1));
5547 if (REGNO (addr_reg
) != base_regno
5548 || newoffset
!= offset
+ 4 * i
)
5555 /* A validation routine: say whether CODE, a condition code, and MODE
5556 match. The other alternatives either don't make sense or should
5557 never be generated. */
5560 validate_condition_mode (code
, mode
)
5562 enum machine_mode mode
;
5564 if (GET_RTX_CLASS (code
) != '<'
5565 || GET_MODE_CLASS (mode
) != MODE_CC
)
5568 /* These don't make sense. */
5569 if ((code
== GT
|| code
== LT
|| code
== GE
|| code
== LE
)
5570 && mode
== CCUNSmode
)
5573 if ((code
== GTU
|| code
== LTU
|| code
== GEU
|| code
== LEU
)
5574 && mode
!= CCUNSmode
)
5577 if (mode
!= CCFPmode
5578 && (code
== ORDERED
|| code
== UNORDERED
5579 || code
== UNEQ
|| code
== LTGT
5580 || code
== UNGT
|| code
== UNLT
5581 || code
== UNGE
|| code
== UNLE
))
5584 /* These should never be generated except for
5585 flag_unsafe_math_optimizations. */
5586 if (mode
== CCFPmode
5587 && ! flag_unsafe_math_optimizations
5588 && (code
== LE
|| code
== GE
5589 || code
== UNEQ
|| code
== LTGT
5590 || code
== UNGT
|| code
== UNLT
))
5593 /* These are invalid; the information is not there. */
5594 if (mode
== CCEQmode
5595 && code
!= EQ
&& code
!= NE
)
5599 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
5600 We only check the opcode against the mode of the CC value here. */
5603 branch_comparison_operator (op
, mode
)
5605 enum machine_mode mode ATTRIBUTE_UNUSED
;
5607 enum rtx_code code
= GET_CODE (op
);
5608 enum machine_mode cc_mode
;
5610 if (GET_RTX_CLASS (code
) != '<')
5613 cc_mode
= GET_MODE (XEXP (op
, 0));
5614 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
5617 validate_condition_mode (code
, cc_mode
);
5622 /* Return 1 if OP is a comparison operation that is valid for a branch
5623 insn and which is true if the corresponding bit in the CC register
5627 branch_positive_comparison_operator (op
, mode
)
5629 enum machine_mode mode
;
5633 if (! branch_comparison_operator (op
, mode
))
5636 code
= GET_CODE (op
);
5637 return (code
== EQ
|| code
== LT
|| code
== GT
5638 || code
== LTU
|| code
== GTU
5639 || code
== UNORDERED
);
5642 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
5643 We check the opcode against the mode of the CC value and disallow EQ or
5644 NE comparisons for integers. */
5647 scc_comparison_operator (op
, mode
)
5649 enum machine_mode mode
;
5651 enum rtx_code code
= GET_CODE (op
);
5652 enum machine_mode cc_mode
;
5654 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
5657 if (GET_RTX_CLASS (code
) != '<')
5660 cc_mode
= GET_MODE (XEXP (op
, 0));
5661 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
5664 validate_condition_mode (code
, cc_mode
);
5666 if (code
== NE
&& cc_mode
!= CCFPmode
)
5673 trap_comparison_operator (op
, mode
)
5675 enum machine_mode mode
;
5677 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
5679 return GET_RTX_CLASS (GET_CODE (op
)) == '<';
5683 boolean_operator (op
, mode
)
5685 enum machine_mode mode ATTRIBUTE_UNUSED
;
5687 enum rtx_code code
= GET_CODE (op
);
5688 return (code
== AND
|| code
== IOR
|| code
== XOR
);
5692 boolean_or_operator (op
, mode
)
5694 enum machine_mode mode ATTRIBUTE_UNUSED
;
5696 enum rtx_code code
= GET_CODE (op
);
5697 return (code
== IOR
|| code
== XOR
);
5701 min_max_operator (op
, mode
)
5703 enum machine_mode mode ATTRIBUTE_UNUSED
;
5705 enum rtx_code code
= GET_CODE (op
);
5706 return (code
== SMIN
|| code
== SMAX
|| code
== UMIN
|| code
== UMAX
);
5709 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
5710 mask required to convert the result of a rotate insn into a shift
5711 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
5714 includes_lshift_p (shiftop
, andop
)
5718 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
5720 shift_mask
<<= INTVAL (shiftop
);
5722 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
5725 /* Similar, but for right shift. */
5728 includes_rshift_p (shiftop
, andop
)
5732 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
5734 shift_mask
>>= INTVAL (shiftop
);
5736 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
5739 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
5740 to perform a left shift. It must have exactly SHIFTOP least
5741 signifigant 0's, then one or more 1's, then zero or more 0's. */
5744 includes_rldic_lshift_p (shiftop
, andop
)
5748 if (GET_CODE (andop
) == CONST_INT
)
5750 HOST_WIDE_INT c
, lsb
, shift_mask
;
5753 if (c
== 0 || c
== ~0)
5757 shift_mask
<<= INTVAL (shiftop
);
5759 /* Find the least signifigant one bit. */
5762 /* It must coincide with the LSB of the shift mask. */
5763 if (-lsb
!= shift_mask
)
5766 /* Invert to look for the next transition (if any). */
5769 /* Remove the low group of ones (originally low group of zeros). */
5772 /* Again find the lsb, and check we have all 1's above. */
5776 else if (GET_CODE (andop
) == CONST_DOUBLE
5777 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
5779 HOST_WIDE_INT low
, high
, lsb
;
5780 HOST_WIDE_INT shift_mask_low
, shift_mask_high
;
5782 low
= CONST_DOUBLE_LOW (andop
);
5783 if (HOST_BITS_PER_WIDE_INT
< 64)
5784 high
= CONST_DOUBLE_HIGH (andop
);
5786 if ((low
== 0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== 0))
5787 || (low
== ~0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0)))
5790 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
5792 shift_mask_high
= ~0;
5793 if (INTVAL (shiftop
) > 32)
5794 shift_mask_high
<<= INTVAL (shiftop
) - 32;
5798 if (-lsb
!= shift_mask_high
|| INTVAL (shiftop
) < 32)
5805 return high
== -lsb
;
5808 shift_mask_low
= ~0;
5809 shift_mask_low
<<= INTVAL (shiftop
);
5813 if (-lsb
!= shift_mask_low
)
5816 if (HOST_BITS_PER_WIDE_INT
< 64)
5821 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
5824 return high
== -lsb
;
5828 return low
== -lsb
&& (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0);
5834 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
5835 to perform a left shift. It must have SHIFTOP or more least
5836 signifigant 0's, with the remainder of the word 1's. */
5839 includes_rldicr_lshift_p (shiftop
, andop
)
5843 if (GET_CODE (andop
) == CONST_INT
)
5845 HOST_WIDE_INT c
, lsb
, shift_mask
;
5848 shift_mask
<<= INTVAL (shiftop
);
5851 /* Find the least signifigant one bit. */
5854 /* It must be covered by the shift mask.
5855 This test also rejects c == 0. */
5856 if ((lsb
& shift_mask
) == 0)
5859 /* Check we have all 1's above the transition, and reject all 1's. */
5860 return c
== -lsb
&& lsb
!= 1;
5862 else if (GET_CODE (andop
) == CONST_DOUBLE
5863 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
5865 HOST_WIDE_INT low
, lsb
, shift_mask_low
;
5867 low
= CONST_DOUBLE_LOW (andop
);
5869 if (HOST_BITS_PER_WIDE_INT
< 64)
5871 HOST_WIDE_INT high
, shift_mask_high
;
5873 high
= CONST_DOUBLE_HIGH (andop
);
5877 shift_mask_high
= ~0;
5878 if (INTVAL (shiftop
) > 32)
5879 shift_mask_high
<<= INTVAL (shiftop
) - 32;
5883 if ((lsb
& shift_mask_high
) == 0)
5886 return high
== -lsb
;
5892 shift_mask_low
= ~0;
5893 shift_mask_low
<<= INTVAL (shiftop
);
5897 if ((lsb
& shift_mask_low
) == 0)
5900 return low
== -lsb
&& lsb
!= 1;
5906 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
5907 for lfq and stfq insns.
5909 Note reg1 and reg2 *must* be hard registers. To be sure we will
5910 abort if we are passed pseudo registers. */
5913 registers_ok_for_quad_peep (reg1
, reg2
)
5916 /* We might have been passed a SUBREG. */
5917 if (GET_CODE (reg1
) != REG
|| GET_CODE (reg2
) != REG
)
5920 return (REGNO (reg1
) == REGNO (reg2
) - 1);
5923 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
5924 addr1 and addr2 must be in consecutive memory locations
5925 (addr2 == addr1 + 8). */
5928 addrs_ok_for_quad_peep (addr1
, addr2
)
5935 /* Extract an offset (if used) from the first addr. */
5936 if (GET_CODE (addr1
) == PLUS
)
5938 /* If not a REG, return zero. */
5939 if (GET_CODE (XEXP (addr1
, 0)) != REG
)
5943 reg1
= REGNO (XEXP (addr1
, 0));
5944 /* The offset must be constant! */
5945 if (GET_CODE (XEXP (addr1
, 1)) != CONST_INT
)
5947 offset1
= INTVAL (XEXP (addr1
, 1));
5950 else if (GET_CODE (addr1
) != REG
)
5954 reg1
= REGNO (addr1
);
5955 /* This was a simple (mem (reg)) expression. Offset is 0. */
5959 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
5960 if (GET_CODE (addr2
) != PLUS
)
5963 if (GET_CODE (XEXP (addr2
, 0)) != REG
5964 || GET_CODE (XEXP (addr2
, 1)) != CONST_INT
)
5967 if (reg1
!= REGNO (XEXP (addr2
, 0)))
5970 /* The offset for the second addr must be 8 more than the first addr. */
5971 if (INTVAL (XEXP (addr2
, 1)) != offset1
+ 8)
5974 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
5979 /* Return the register class of a scratch register needed to copy IN into
5980 or out of a register in CLASS in MODE. If it can be done directly,
5981 NO_REGS is returned. */
5984 secondary_reload_class (class, mode
, in
)
5985 enum reg_class
class;
5986 enum machine_mode mode ATTRIBUTE_UNUSED
;
5991 if (TARGET_ELF
|| (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))
5993 /* We cannot copy a symbolic operand directly into anything
5994 other than BASE_REGS for TARGET_ELF. So indicate that a
5995 register from BASE_REGS is needed as an intermediate
5998 On Darwin, pic addresses require a load from memory, which
5999 needs a base register. */
6000 if (class != BASE_REGS
6001 && (GET_CODE (in
) == SYMBOL_REF
6002 || GET_CODE (in
) == HIGH
6003 || GET_CODE (in
) == LABEL_REF
6004 || GET_CODE (in
) == CONST
))
6008 if (GET_CODE (in
) == REG
)
6011 if (regno
>= FIRST_PSEUDO_REGISTER
)
6013 regno
= true_regnum (in
);
6014 if (regno
>= FIRST_PSEUDO_REGISTER
)
6018 else if (GET_CODE (in
) == SUBREG
)
6020 regno
= true_regnum (in
);
6021 if (regno
>= FIRST_PSEUDO_REGISTER
)
6027 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
6029 if (class == GENERAL_REGS
|| class == BASE_REGS
6030 || (regno
>= 0 && INT_REGNO_P (regno
)))
6033 /* Constants, memory, and FP registers can go into FP registers. */
6034 if ((regno
== -1 || FP_REGNO_P (regno
))
6035 && (class == FLOAT_REGS
|| class == NON_SPECIAL_REGS
))
6038 /* Memory, and AltiVec registers can go into AltiVec registers. */
6039 if ((regno
== -1 || ALTIVEC_REGNO_P (regno
))
6040 && class == ALTIVEC_REGS
)
6043 /* We can copy among the CR registers. */
6044 if ((class == CR_REGS
|| class == CR0_REGS
)
6045 && regno
>= 0 && CR_REGNO_P (regno
))
6048 /* Otherwise, we need GENERAL_REGS. */
6049 return GENERAL_REGS
;
6052 /* Given a comparison operation, return the bit number in CCR to test. We
6053 know this is a valid comparison.
6055 SCC_P is 1 if this is for an scc. That means that %D will have been
6056 used instead of %C, so the bits will be in different places.
6058 Return -1 if OP isn't a valid comparison for some reason. */
6065 enum rtx_code code
= GET_CODE (op
);
6066 enum machine_mode cc_mode
;
6071 if (GET_RTX_CLASS (code
) != '<')
6076 if (GET_CODE (reg
) != REG
6077 || ! CR_REGNO_P (REGNO (reg
)))
6080 cc_mode
= GET_MODE (reg
);
6081 cc_regnum
= REGNO (reg
);
6082 base_bit
= 4 * (cc_regnum
- CR0_REGNO
);
6084 validate_condition_mode (code
, cc_mode
);
6089 return scc_p
? base_bit
+ 3 : base_bit
+ 2;
6091 return base_bit
+ 2;
6092 case GT
: case GTU
: case UNLE
:
6093 return base_bit
+ 1;
6094 case LT
: case LTU
: case UNGE
:
6096 case ORDERED
: case UNORDERED
:
6097 return base_bit
+ 3;
6100 /* If scc, we will have done a cror to put the bit in the
6101 unordered position. So test that bit. For integer, this is ! LT
6102 unless this is an scc insn. */
6103 return scc_p
? base_bit
+ 3 : base_bit
;
6106 return scc_p
? base_bit
+ 3 : base_bit
+ 1;
6113 /* Return the GOT register. */
6116 rs6000_got_register (value
)
6117 rtx value ATTRIBUTE_UNUSED
;
6119 /* The second flow pass currently (June 1999) can't update
6120 regs_ever_live without disturbing other parts of the compiler, so
6121 update it here to make the prolog/epilogue code happy. */
6122 if (no_new_pseudos
&& ! regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
])
6123 regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
] = 1;
6125 current_function_uses_pic_offset_table
= 1;
6127 return pic_offset_table_rtx
;
6130 /* Functions to init, mark and free struct machine_function.
6131 These will be called, via pointer variables,
6132 from push_function_context and pop_function_context. */
6135 rs6000_init_machine_status (p
)
6138 p
->machine
= (machine_function
*) xcalloc (1, sizeof (machine_function
));
6142 rs6000_free_machine_status (p
)
6145 if (p
->machine
== NULL
)
6153 /* Print an operand. Recognize special options, documented below. */
6156 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
6157 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
6159 #define SMALL_DATA_RELOC "sda21"
6160 #define SMALL_DATA_REG 0
6164 print_operand (file
, x
, code
)
6172 /* These macros test for integers and extract the low-order bits. */
6174 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
6175 && GET_MODE (X) == VOIDmode)
6177 #define INT_LOWPART(X) \
6178 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
6183 /* Write out an instruction after the call which may be replaced
6184 with glue code by the loader. This depends on the AIX version. */
6185 asm_fprintf (file
, RS6000_CALL_GLUE
);
6188 /* %a is output_address. */
6191 /* If X is a constant integer whose low-order 5 bits are zero,
6192 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
6193 in the AIX assembler where "sri" with a zero shift count
6194 writes a trash instruction. */
6195 if (GET_CODE (x
) == CONST_INT
&& (INTVAL (x
) & 31) == 0)
6202 /* If constant, low-order 16 bits of constant, unsigned.
6203 Otherwise, write normally. */
6205 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 0xffff);
6207 print_operand (file
, x
, 0);
6211 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
6212 for 64-bit mask direction. */
6213 putc (((INT_LOWPART(x
) & 1) == 0 ? 'r' : 'l'), file
);
6216 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
6220 /* There used to be a comment for 'C' reading "This is an
6221 optional cror needed for certain floating-point
6222 comparisons. Otherwise write nothing." */
6224 /* Similar, except that this is for an scc, so we must be able to
6225 encode the test in a single bit that is one. We do the above
6226 for any LE, GE, GEU, or LEU and invert the bit for NE. */
6227 if (GET_CODE (x
) == LE
|| GET_CODE (x
) == GE
6228 || GET_CODE (x
) == LEU
|| GET_CODE (x
) == GEU
)
6230 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
6232 fprintf (file
, "cror %d,%d,%d\n\t", base_bit
+ 3,
6234 base_bit
+ (GET_CODE (x
) == GE
|| GET_CODE (x
) == GEU
));
6237 else if (GET_CODE (x
) == NE
)
6239 int base_bit
= 4 * (REGNO (XEXP (x
, 0)) - CR0_REGNO
);
6241 fprintf (file
, "crnor %d,%d,%d\n\t", base_bit
+ 3,
6242 base_bit
+ 2, base_bit
+ 2);
6247 /* X is a CR register. Print the number of the EQ bit of the CR */
6248 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
6249 output_operand_lossage ("invalid %%E value");
6251 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
) + 2);
6255 /* X is a CR register. Print the shift count needed to move it
6256 to the high-order four bits. */
6257 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
6258 output_operand_lossage ("invalid %%f value");
6260 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
));
6264 /* Similar, but print the count for the rotate in the opposite
6266 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
6267 output_operand_lossage ("invalid %%F value");
6269 fprintf (file
, "%d", 32 - 4 * (REGNO (x
) - CR0_REGNO
));
6273 /* X is a constant integer. If it is negative, print "m",
6274 otherwise print "z". This is to make a aze or ame insn. */
6275 if (GET_CODE (x
) != CONST_INT
)
6276 output_operand_lossage ("invalid %%G value");
6277 else if (INTVAL (x
) >= 0)
6284 /* If constant, output low-order five bits. Otherwise, write
6287 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 31);
6289 print_operand (file
, x
, 0);
6293 /* If constant, output low-order six bits. Otherwise, write
6296 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 63);
6298 print_operand (file
, x
, 0);
6302 /* Print `i' if this is a constant, else nothing. */
6308 /* Write the bit number in CCR for jump. */
6311 output_operand_lossage ("invalid %%j code");
6313 fprintf (file
, "%d", i
);
6317 /* Similar, but add one for shift count in rlinm for scc and pass
6318 scc flag to `ccr_bit'. */
6321 output_operand_lossage ("invalid %%J code");
6323 /* If we want bit 31, write a shift count of zero, not 32. */
6324 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
6328 /* X must be a constant. Write the 1's complement of the
6331 output_operand_lossage ("invalid %%k value");
6333 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ~ INT_LOWPART (x
));
6337 /* X must be a symbolic constant on ELF. Write an
6338 expression suitable for an 'addi' that adds in the low 16
6340 if (GET_CODE (x
) != CONST
)
6342 print_operand_address (file
, x
);
6347 if (GET_CODE (XEXP (x
, 0)) != PLUS
6348 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
6349 && GET_CODE (XEXP (XEXP (x
, 0), 0)) != LABEL_REF
)
6350 || GET_CODE (XEXP (XEXP (x
, 0), 1)) != CONST_INT
)
6351 output_operand_lossage ("invalid %%K value");
6352 print_operand_address (file
, XEXP (XEXP (x
, 0), 0));
6354 print_operand (file
, XEXP (XEXP (x
, 0), 1), 0);
6358 /* %l is output_asm_label. */
6361 /* Write second word of DImode or DFmode reference. Works on register
6362 or non-indexed memory only. */
6363 if (GET_CODE (x
) == REG
)
6364 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
6365 else if (GET_CODE (x
) == MEM
)
6367 /* Handle possible auto-increment. Since it is pre-increment and
6368 we have already done it, we can just use an offset of word. */
6369 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
6370 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
6371 output_address (plus_constant (XEXP (XEXP (x
, 0), 0),
6374 output_address (XEXP (adjust_address_nv (x
, SImode
,
6378 if (small_data_operand (x
, GET_MODE (x
)))
6379 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
6380 reg_names
[SMALL_DATA_REG
]);
6385 /* MB value for a mask operand. */
6386 if (! mask_operand (x
, SImode
))
6387 output_operand_lossage ("invalid %%m value");
6389 val
= INT_LOWPART (x
);
6391 /* If the high bit is set and the low bit is not, the value is zero.
6392 If the high bit is zero, the value is the first 1 bit we find from
6394 if ((val
& 0x80000000) && ((val
& 1) == 0))
6399 else if ((val
& 0x80000000) == 0)
6401 for (i
= 1; i
< 32; i
++)
6402 if ((val
<<= 1) & 0x80000000)
6404 fprintf (file
, "%d", i
);
6408 /* Otherwise, look for the first 0 bit from the right. The result is its
6409 number plus 1. We know the low-order bit is one. */
6410 for (i
= 0; i
< 32; i
++)
6411 if (((val
>>= 1) & 1) == 0)
6414 /* If we ended in ...01, i would be 0. The correct value is 31, so
6416 fprintf (file
, "%d", 31 - i
);
6420 /* ME value for a mask operand. */
6421 if (! mask_operand (x
, SImode
))
6422 output_operand_lossage ("invalid %%M value");
6424 val
= INT_LOWPART (x
);
6426 /* If the low bit is set and the high bit is not, the value is 31.
6427 If the low bit is zero, the value is the first 1 bit we find from
6429 if ((val
& 1) && ((val
& 0x80000000) == 0))
6434 else if ((val
& 1) == 0)
6436 for (i
= 0; i
< 32; i
++)
6437 if ((val
>>= 1) & 1)
6440 /* If we had ....10, i would be 0. The result should be
6441 30, so we need 30 - i. */
6442 fprintf (file
, "%d", 30 - i
);
6446 /* Otherwise, look for the first 0 bit from the left. The result is its
6447 number minus 1. We know the high-order bit is one. */
6448 for (i
= 0; i
< 32; i
++)
6449 if (((val
<<= 1) & 0x80000000) == 0)
6452 fprintf (file
, "%d", i
);
6455 /* %n outputs the negative of its operand. */
6458 /* Write the number of elements in the vector times 4. */
6459 if (GET_CODE (x
) != PARALLEL
)
6460 output_operand_lossage ("invalid %%N value");
6462 fprintf (file
, "%d", XVECLEN (x
, 0) * 4);
6466 /* Similar, but subtract 1 first. */
6467 if (GET_CODE (x
) != PARALLEL
)
6468 output_operand_lossage ("invalid %%O value");
6470 fprintf (file
, "%d", (XVECLEN (x
, 0) - 1) * 4);
6474 /* X is a CONST_INT that is a power of two. Output the logarithm. */
6476 || INT_LOWPART (x
) < 0
6477 || (i
= exact_log2 (INT_LOWPART (x
))) < 0)
6478 output_operand_lossage ("invalid %%p value");
6480 fprintf (file
, "%d", i
);
6484 /* The operand must be an indirect memory reference. The result
6485 is the register number. */
6486 if (GET_CODE (x
) != MEM
|| GET_CODE (XEXP (x
, 0)) != REG
6487 || REGNO (XEXP (x
, 0)) >= 32)
6488 output_operand_lossage ("invalid %%P value");
6490 fprintf (file
, "%d", REGNO (XEXP (x
, 0)));
6494 /* This outputs the logical code corresponding to a boolean
6495 expression. The expression may have one or both operands
6496 negated (if one, only the first one). For condition register
6497 logical operations, it will also treat the negated
6498 CR codes as NOTs, but not handle NOTs of them. */
6500 const char *const *t
= 0;
6502 enum rtx_code code
= GET_CODE (x
);
6503 static const char * const tbl
[3][3] = {
6504 { "and", "andc", "nor" },
6505 { "or", "orc", "nand" },
6506 { "xor", "eqv", "xor" } };
6510 else if (code
== IOR
)
6512 else if (code
== XOR
)
6515 output_operand_lossage ("invalid %%q value");
6517 if (GET_CODE (XEXP (x
, 0)) != NOT
)
6521 if (GET_CODE (XEXP (x
, 1)) == NOT
)
6532 /* X is a CR register. Print the mask for `mtcrf'. */
6533 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
6534 output_operand_lossage ("invalid %%R value");
6536 fprintf (file
, "%d", 128 >> (REGNO (x
) - CR0_REGNO
));
6540 /* Low 5 bits of 32 - value */
6542 output_operand_lossage ("invalid %%s value");
6544 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, (32 - INT_LOWPART (x
)) & 31);
6548 /* PowerPC64 mask position. All 0's and all 1's are excluded.
6549 CONST_INT 32-bit mask is considered sign-extended so any
6550 transition must occur within the CONST_INT, not on the boundary. */
6551 if (! mask64_operand (x
, DImode
))
6552 output_operand_lossage ("invalid %%S value");
6554 val
= INT_LOWPART (x
);
6556 if (val
& 1) /* Clear Left */
6558 for (i
= 0; i
< HOST_BITS_PER_WIDE_INT
; i
++)
6559 if (!((val
>>= 1) & 1))
6562 #if HOST_BITS_PER_WIDE_INT == 32
6563 if (GET_CODE (x
) == CONST_DOUBLE
&& i
== 32)
6565 val
= CONST_DOUBLE_HIGH (x
);
6570 for (i
= 32; i
< 64; i
++)
6571 if (!((val
>>= 1) & 1))
6575 /* i = index of last set bit from right
6576 mask begins at 63 - i from left */
6578 output_operand_lossage ("%%S computed all 1's mask");
6580 fprintf (file
, "%d", 63 - i
);
6583 else /* Clear Right */
6585 for (i
= 0; i
< HOST_BITS_PER_WIDE_INT
; i
++)
6586 if ((val
>>= 1) & 1)
6589 #if HOST_BITS_PER_WIDE_INT == 32
6590 if (GET_CODE (x
) == CONST_DOUBLE
&& i
== 32)
6592 val
= CONST_DOUBLE_HIGH (x
);
6594 if (val
== (HOST_WIDE_INT
) -1)
6597 for (i
= 32; i
< 64; i
++)
6598 if ((val
>>= 1) & 1)
6602 /* i = index of last clear bit from right
6603 mask ends at 62 - i from left */
6605 output_operand_lossage ("%%S computed all 0's mask");
6607 fprintf (file
, "%d", 62 - i
);
6612 /* Print the symbolic name of a branch target register. */
6613 if (GET_CODE (x
) != REG
|| (REGNO (x
) != LINK_REGISTER_REGNUM
6614 && REGNO (x
) != COUNT_REGISTER_REGNUM
))
6615 output_operand_lossage ("invalid %%T value");
6616 else if (REGNO (x
) == LINK_REGISTER_REGNUM
)
6617 fputs (TARGET_NEW_MNEMONICS
? "lr" : "r", file
);
6619 fputs ("ctr", file
);
6623 /* High-order 16 bits of constant for use in unsigned operand. */
6625 output_operand_lossage ("invalid %%u value");
6627 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
6628 (INT_LOWPART (x
) >> 16) & 0xffff);
6632 /* High-order 16 bits of constant for use in signed operand. */
6634 output_operand_lossage ("invalid %%v value");
6636 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
6637 (INT_LOWPART (x
) >> 16) & 0xffff);
6641 /* Print `u' if this has an auto-increment or auto-decrement. */
6642 if (GET_CODE (x
) == MEM
6643 && (GET_CODE (XEXP (x
, 0)) == PRE_INC
6644 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
))
6649 /* Print the trap code for this operand. */
6650 switch (GET_CODE (x
))
6653 fputs ("eq", file
); /* 4 */
6656 fputs ("ne", file
); /* 24 */
6659 fputs ("lt", file
); /* 16 */
6662 fputs ("le", file
); /* 20 */
6665 fputs ("gt", file
); /* 8 */
6668 fputs ("ge", file
); /* 12 */
6671 fputs ("llt", file
); /* 2 */
6674 fputs ("lle", file
); /* 6 */
6677 fputs ("lgt", file
); /* 1 */
6680 fputs ("lge", file
); /* 5 */
6688 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
6691 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
6692 ((INT_LOWPART (x
) & 0xffff) ^ 0x8000) - 0x8000);
6694 print_operand (file
, x
, 0);
6698 /* MB value for a PowerPC64 rldic operand. */
6699 val
= (GET_CODE (x
) == CONST_INT
6700 ? INTVAL (x
) : CONST_DOUBLE_HIGH (x
));
6705 for (i
= 0; i
< HOST_BITS_PER_WIDE_INT
; i
++)
6706 if ((val
<<= 1) < 0)
6709 #if HOST_BITS_PER_WIDE_INT == 32
6710 if (GET_CODE (x
) == CONST_INT
&& i
>= 0)
6711 i
+= 32; /* zero-extend high-part was all 0's */
6712 else if (GET_CODE (x
) == CONST_DOUBLE
&& i
== 32)
6714 val
= CONST_DOUBLE_LOW (x
);
6721 for ( ; i
< 64; i
++)
6722 if ((val
<<= 1) < 0)
6727 fprintf (file
, "%d", i
+ 1);
6731 if (GET_CODE (x
) == MEM
6732 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x
, 0), 0))
6737 /* Like 'L', for third word of TImode */
6738 if (GET_CODE (x
) == REG
)
6739 fprintf (file
, "%s", reg_names
[REGNO (x
) + 2]);
6740 else if (GET_CODE (x
) == MEM
)
6742 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
6743 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
6744 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 8));
6746 output_address (XEXP (adjust_address_nv (x
, SImode
, 8), 0));
6747 if (small_data_operand (x
, GET_MODE (x
)))
6748 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
6749 reg_names
[SMALL_DATA_REG
]);
6754 /* X is a SYMBOL_REF. Write out the name preceded by a
6755 period and without any trailing data in brackets. Used for function
6756 names. If we are configured for System V (or the embedded ABI) on
6757 the PowerPC, do not emit the period, since those systems do not use
6758 TOCs and the like. */
6759 if (GET_CODE (x
) != SYMBOL_REF
)
6762 if (XSTR (x
, 0)[0] != '.')
6764 switch (DEFAULT_ABI
)
6774 case ABI_AIX_NODESC
:
6780 RS6000_OUTPUT_BASENAME (file
, XSTR (x
, 0));
6782 assemble_name (file
, XSTR (x
, 0));
6787 /* Like 'L', for last word of TImode. */
6788 if (GET_CODE (x
) == REG
)
6789 fprintf (file
, "%s", reg_names
[REGNO (x
) + 3]);
6790 else if (GET_CODE (x
) == MEM
)
6792 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
6793 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
6794 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 12));
6796 output_address (XEXP (adjust_address_nv (x
, SImode
, 12), 0));
6797 if (small_data_operand (x
, GET_MODE (x
)))
6798 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
6799 reg_names
[SMALL_DATA_REG
]);
6803 /* Print AltiVec memory operand. */
6808 if (GET_CODE (x
) != MEM
)
6813 if (GET_CODE (tmp
) == REG
)
6814 fprintf (file
, "0,%s", reg_names
[REGNO (tmp
)]);
6815 else if (GET_CODE (tmp
) == PLUS
&& GET_CODE (XEXP (tmp
, 1)) == REG
)
6817 if (REGNO (XEXP (tmp
, 0)) == 0)
6818 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 1)) ],
6819 reg_names
[ REGNO (XEXP (tmp
, 0)) ]);
6821 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 0)) ],
6822 reg_names
[ REGNO (XEXP (tmp
, 1)) ]);
6830 if (GET_CODE (x
) == REG
)
6831 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
6832 else if (GET_CODE (x
) == MEM
)
6834 /* We need to handle PRE_INC and PRE_DEC here, since we need to
6835 know the width from the mode. */
6836 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
)
6837 fprintf (file
, "%d(%s)", GET_MODE_SIZE (GET_MODE (x
)),
6838 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
6839 else if (GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
6840 fprintf (file
, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x
)),
6841 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
6843 output_address (XEXP (x
, 0));
6846 output_addr_const (file
, x
);
6850 output_operand_lossage ("invalid %%xn code");
6854 /* Print the address of an operand. */
6857 print_operand_address (file
, x
)
6861 if (GET_CODE (x
) == REG
)
6862 fprintf (file
, "0(%s)", reg_names
[ REGNO (x
) ]);
6863 else if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
6864 || GET_CODE (x
) == LABEL_REF
)
6866 output_addr_const (file
, x
);
6867 if (small_data_operand (x
, GET_MODE (x
)))
6868 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
6869 reg_names
[SMALL_DATA_REG
]);
6870 else if (TARGET_TOC
)
6873 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == REG
)
6875 if (REGNO (XEXP (x
, 0)) == 0)
6876 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 1)) ],
6877 reg_names
[ REGNO (XEXP (x
, 0)) ]);
6879 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 0)) ],
6880 reg_names
[ REGNO (XEXP (x
, 1)) ]);
6882 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
6884 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (XEXP (x
, 1)));
6885 fprintf (file
, "(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
6888 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
6889 && CONSTANT_P (XEXP (x
, 1)))
6891 output_addr_const (file
, XEXP (x
, 1));
6892 fprintf (file
, "@l(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
6896 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
6897 && CONSTANT_P (XEXP (x
, 1)))
6899 fprintf (file
, "lo16(");
6900 output_addr_const (file
, XEXP (x
, 1));
6901 fprintf (file
, ")(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
6904 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x
))
6906 if (TARGET_AIX
&& (!TARGET_ELF
|| !TARGET_MINIMAL_TOC
))
6908 rtx contains_minus
= XEXP (x
, 1);
6912 /* Find the (minus (sym) (toc)) buried in X, and temporarily
6913 turn it into (sym) for output_addr_const. */
6914 while (GET_CODE (XEXP (contains_minus
, 0)) != MINUS
)
6915 contains_minus
= XEXP (contains_minus
, 0);
6917 minus
= XEXP (contains_minus
, 0);
6918 symref
= XEXP (minus
, 0);
6919 XEXP (contains_minus
, 0) = symref
;
6924 name
= XSTR (symref
, 0);
6925 newname
= alloca (strlen (name
) + sizeof ("@toc"));
6926 strcpy (newname
, name
);
6927 strcat (newname
, "@toc");
6928 XSTR (symref
, 0) = newname
;
6930 output_addr_const (file
, XEXP (x
, 1));
6932 XSTR (symref
, 0) = name
;
6933 XEXP (contains_minus
, 0) = minus
;
6936 output_addr_const (file
, XEXP (x
, 1));
6938 fprintf (file
, "(%s)", reg_names
[REGNO (XEXP (x
, 0))]);
6944 /* Target hook for assembling integer objects. The powerpc version has
6945 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
6946 is defined. It also needs to handle DI-mode objects on 64-bit
6950 rs6000_assemble_integer (x
, size
, aligned_p
)
6955 #ifdef RELOCATABLE_NEEDS_FIXUP
6956 /* Special handling for SI values. */
6957 if (size
== 4 && aligned_p
)
6959 extern int in_toc_section
PARAMS ((void));
6960 static int recurse
= 0;
6962 /* For -mrelocatable, we mark all addresses that need to be fixed up
6963 in the .fixup section. */
6964 if (TARGET_RELOCATABLE
6965 && !in_toc_section ()
6966 && !in_text_section ()
6968 && GET_CODE (x
) != CONST_INT
6969 && GET_CODE (x
) != CONST_DOUBLE
6975 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCP", fixuplabelno
);
6977 ASM_OUTPUT_LABEL (asm_out_file
, buf
);
6978 fprintf (asm_out_file
, "\t.long\t(");
6979 output_addr_const (asm_out_file
, x
);
6980 fprintf (asm_out_file
, ")@fixup\n");
6981 fprintf (asm_out_file
, "\t.section\t\".fixup\",\"aw\"\n");
6982 ASM_OUTPUT_ALIGN (asm_out_file
, 2);
6983 fprintf (asm_out_file
, "\t.long\t");
6984 assemble_name (asm_out_file
, buf
);
6985 fprintf (asm_out_file
, "\n\t.previous\n");
6989 /* Remove initial .'s to turn a -mcall-aixdesc function
6990 address into the address of the descriptor, not the function
6992 else if (GET_CODE (x
) == SYMBOL_REF
6993 && XSTR (x
, 0)[0] == '.'
6994 && DEFAULT_ABI
== ABI_AIX
)
6996 const char *name
= XSTR (x
, 0);
6997 while (*name
== '.')
7000 fprintf (asm_out_file
, "\t.long\t%s\n", name
);
7004 #endif /* RELOCATABLE_NEEDS_FIXUP */
7005 return default_assemble_integer (x
, size
, aligned_p
);
7009 rs6000_reverse_condition (mode
, code
)
7010 enum machine_mode mode
;
7013 /* Reversal of FP compares takes care -- an ordered compare
7014 becomes an unordered compare and vice versa. */
7015 if (mode
== CCFPmode
)
7016 return reverse_condition_maybe_unordered (code
);
7018 return reverse_condition (code
);
7021 /* Generate a compare for CODE. Return a brand-new rtx that
7022 represents the result of the compare. */
7025 rs6000_generate_compare (code
)
7028 enum machine_mode comp_mode
;
7031 if (rs6000_compare_fp_p
)
7032 comp_mode
= CCFPmode
;
7033 else if (code
== GTU
|| code
== LTU
7034 || code
== GEU
|| code
== LEU
)
7035 comp_mode
= CCUNSmode
;
7039 /* First, the compare. */
7040 compare_result
= gen_reg_rtx (comp_mode
);
7041 emit_insn (gen_rtx_SET (VOIDmode
, compare_result
,
7042 gen_rtx_COMPARE (comp_mode
,
7044 rs6000_compare_op1
)));
7046 /* Some kinds of FP comparisons need an OR operation;
7047 except for flag_unsafe_math_optimizations we don't bother. */
7048 if (rs6000_compare_fp_p
7049 && ! flag_unsafe_math_optimizations
7050 && (code
== LE
|| code
== GE
7051 || code
== UNEQ
|| code
== LTGT
7052 || code
== UNGT
|| code
== UNLT
))
7054 enum rtx_code or1
, or2
;
7055 rtx or1_rtx
, or2_rtx
, compare2_rtx
;
7056 rtx or_result
= gen_reg_rtx (CCEQmode
);
7060 case LE
: or1
= LT
; or2
= EQ
; break;
7061 case GE
: or1
= GT
; or2
= EQ
; break;
7062 case UNEQ
: or1
= UNORDERED
; or2
= EQ
; break;
7063 case LTGT
: or1
= LT
; or2
= GT
; break;
7064 case UNGT
: or1
= UNORDERED
; or2
= GT
; break;
7065 case UNLT
: or1
= UNORDERED
; or2
= LT
; break;
7068 validate_condition_mode (or1
, comp_mode
);
7069 validate_condition_mode (or2
, comp_mode
);
7070 or1_rtx
= gen_rtx (or1
, SImode
, compare_result
, const0_rtx
);
7071 or2_rtx
= gen_rtx (or2
, SImode
, compare_result
, const0_rtx
);
7072 compare2_rtx
= gen_rtx_COMPARE (CCEQmode
,
7073 gen_rtx_IOR (SImode
, or1_rtx
, or2_rtx
),
7075 emit_insn (gen_rtx_SET (VOIDmode
, or_result
, compare2_rtx
));
7077 compare_result
= or_result
;
7081 validate_condition_mode (code
, GET_MODE (compare_result
));
7083 return gen_rtx (code
, VOIDmode
, compare_result
, const0_rtx
);
7087 /* Emit the RTL for an sCOND pattern. */
7090 rs6000_emit_sCOND (code
, result
)
7095 enum machine_mode op_mode
;
7097 condition_rtx
= rs6000_generate_compare (code
);
7099 op_mode
= GET_MODE (rs6000_compare_op0
);
7100 if (op_mode
== VOIDmode
)
7101 op_mode
= GET_MODE (rs6000_compare_op1
);
7103 if (TARGET_POWERPC64
&& (op_mode
== DImode
|| rs6000_compare_fp_p
))
7105 PUT_MODE (condition_rtx
, DImode
);
7106 convert_move (result
, condition_rtx
, 0);
7110 PUT_MODE (condition_rtx
, SImode
);
7111 emit_insn (gen_rtx_SET (VOIDmode
, result
, condition_rtx
));
7115 /* Emit a branch of kind CODE to location LOC. */
7118 rs6000_emit_cbranch (code
, loc
)
7122 rtx condition_rtx
, loc_ref
;
7124 condition_rtx
= rs6000_generate_compare (code
);
7125 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
7126 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
7127 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
7131 /* Return the string to output a conditional branch to LABEL, which is
7132 the operand number of the label, or -1 if the branch is really a
7135 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
7136 condition code register and its mode specifies what kind of
7139 REVERSED is non-zero if we should reverse the sense of the comparison.
7141 INSN is the insn. */
7144 output_cbranch (op
, label
, reversed
, insn
)
7150 static char string
[64];
7151 enum rtx_code code
= GET_CODE (op
);
7152 rtx cc_reg
= XEXP (op
, 0);
7153 enum machine_mode mode
= GET_MODE (cc_reg
);
7154 int cc_regno
= REGNO (cc_reg
) - CR0_REGNO
;
7155 int need_longbranch
= label
!= NULL
&& get_attr_length (insn
) == 8;
7156 int really_reversed
= reversed
^ need_longbranch
;
7162 validate_condition_mode (code
, mode
);
7164 /* Work out which way this really branches. We could use
7165 reverse_condition_maybe_unordered here always but this
7166 makes the resulting assembler clearer. */
7167 if (really_reversed
)
7168 code
= rs6000_reverse_condition (mode
, code
);
7172 /* Not all of these are actually distinct opcodes, but
7173 we distinguish them for clarity of the resulting assembler. */
7175 ccode
= "ne"; break;
7177 ccode
= "eq"; break;
7179 ccode
= "ge"; break;
7180 case GT
: case GTU
: case UNGT
:
7181 ccode
= "gt"; break;
7183 ccode
= "le"; break;
7184 case LT
: case LTU
: case UNLT
:
7185 ccode
= "lt"; break;
7186 case UNORDERED
: ccode
= "un"; break;
7187 case ORDERED
: ccode
= "nu"; break;
7188 case UNGE
: ccode
= "nl"; break;
7189 case UNLE
: ccode
= "ng"; break;
7194 /* Maybe we have a guess as to how likely the branch is.
7195 The old mnemonics don't have a way to specify this information. */
7196 note
= find_reg_note (insn
, REG_BR_PROB
, NULL_RTX
);
7197 if (note
!= NULL_RTX
)
7199 /* PROB is the difference from 50%. */
7200 int prob
= INTVAL (XEXP (note
, 0)) - REG_BR_PROB_BASE
/ 2;
7202 /* For branches that are very close to 50%, assume not-taken. */
7203 if (abs (prob
) > REG_BR_PROB_BASE
/ 20
7204 && ((prob
> 0) ^ need_longbranch
))
7213 s
+= sprintf (s
, "{b%sr|b%slr%s} ", ccode
, ccode
, pred
);
7215 s
+= sprintf (s
, "{b%s|b%s%s} ", ccode
, ccode
, pred
);
7217 /* We need to escape any '%' characters in the reg_names string.
7218 Assume they'd only be the first character... */
7219 if (reg_names
[cc_regno
+ CR0_REGNO
][0] == '%')
7221 s
+= sprintf (s
, "%s", reg_names
[cc_regno
+ CR0_REGNO
]);
7225 /* If the branch distance was too far, we may have to use an
7226 unconditional branch to go the distance. */
7227 if (need_longbranch
)
7228 s
+= sprintf (s
, ",$+8\n\tb %s", label
);
7230 s
+= sprintf (s
, ",%s", label
);
7236 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
7237 operands of the last comparison is nonzero/true, FALSE_COND if it
7238 is zero/false. Return 0 if the hardware has no such operation. */
7241 rs6000_emit_cmove (dest
, op
, true_cond
, false_cond
)
7247 enum rtx_code code
= GET_CODE (op
);
7248 rtx op0
= rs6000_compare_op0
;
7249 rtx op1
= rs6000_compare_op1
;
7251 enum machine_mode mode
= GET_MODE (op0
);
7254 /* First, work out if the hardware can do this at all, or
7255 if it's too slow... */
7256 /* If the comparison is an integer one, since we only have fsel
7257 it'll be cheaper to use a branch. */
7258 if (! rs6000_compare_fp_p
)
7261 /* Eliminate half of the comparisons by switching operands, this
7262 makes the remaining code simpler. */
7263 if (code
== UNLT
|| code
== UNGT
|| code
== UNORDERED
|| code
== NE
7264 || code
== LTGT
|| code
== LT
)
7266 code
= reverse_condition_maybe_unordered (code
);
7268 true_cond
= false_cond
;
7272 /* UNEQ and LTGT take four instructions for a comparison with zero,
7273 it'll probably be faster to use a branch here too. */
7277 if (GET_CODE (op1
) == CONST_DOUBLE
)
7278 REAL_VALUE_FROM_CONST_DOUBLE (c1
, op1
);
7280 We're going to try to implement comparisons by performing
7281 a subtract, then comparing against zero. Unfortunately,
7282 Inf - Inf is NaN which is not zero, and so if we don't
7283 know that the operand is finite and the comparison
7284 would treat EQ different to UNORDERED, we can't do it. */
7285 if (! flag_unsafe_math_optimizations
7286 && code
!= GT
&& code
!= UNGE
7287 && (GET_CODE (op1
) != CONST_DOUBLE
|| target_isinf (c1
))
7288 /* Constructs of the form (a OP b ? a : b) are safe. */
7289 && ((! rtx_equal_p (op0
, false_cond
) && ! rtx_equal_p (op1
, false_cond
))
7290 || (! rtx_equal_p (op0
, true_cond
)
7291 && ! rtx_equal_p (op1
, true_cond
))))
7293 /* At this point we know we can use fsel. */
7295 /* Reduce the comparison to a comparison against zero. */
7296 temp
= gen_reg_rtx (mode
);
7297 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
7298 gen_rtx_MINUS (mode
, op0
, op1
)));
7300 op1
= CONST0_RTX (mode
);
7302 /* If we don't care about NaNs we can reduce some of the comparisons
7303 down to faster ones. */
7304 if (flag_unsafe_math_optimizations
)
7310 true_cond
= false_cond
;
7323 /* Now, reduce everything down to a GE. */
7330 temp
= gen_reg_rtx (mode
);
7331 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (mode
, op0
)));
7336 temp
= gen_reg_rtx (mode
);
7337 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_ABS (mode
, op0
)));
7342 temp
= gen_reg_rtx (mode
);
7343 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
7345 gen_rtx_ABS (mode
, op0
))));
7350 temp
= gen_reg_rtx (mode
);
7351 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
7352 gen_rtx_IF_THEN_ELSE (mode
,
7353 gen_rtx_GE (VOIDmode
,
7355 true_cond
, false_cond
)));
7357 true_cond
= false_cond
;
7359 temp
= gen_reg_rtx (mode
);
7360 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (mode
, op0
)));
7365 temp
= gen_reg_rtx (mode
);
7366 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
7367 gen_rtx_IF_THEN_ELSE (mode
,
7368 gen_rtx_GE (VOIDmode
,
7370 true_cond
, false_cond
)));
7372 false_cond
= true_cond
;
7374 temp
= gen_reg_rtx (mode
);
7375 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (mode
, op0
)));
7383 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
7384 gen_rtx_IF_THEN_ELSE (GET_MODE (dest
),
7385 gen_rtx_GE (VOIDmode
,
7387 true_cond
, false_cond
)));
7392 rs6000_emit_minmax (dest
, code
, op0
, op1
)
7398 enum machine_mode mode
= GET_MODE (op0
);
7400 if (code
== SMAX
|| code
== UMAX
)
7401 target
= emit_conditional_move (dest
, GE
, op0
, op1
, mode
,
7404 target
= emit_conditional_move (dest
, GE
, op0
, op1
, mode
,
7406 if (target
== NULL_RTX
)
7409 emit_move_insn (dest
, target
);
7412 /* This page contains routines that are used to determine what the
7413 function prologue and epilogue code will do and write them out. */
7415 /* Return the first fixed-point register that is required to be
7416 saved. 32 if none. */
7419 first_reg_to_save ()
7423 /* Find lowest numbered live register. */
7424 for (first_reg
= 13; first_reg
<= 31; first_reg
++)
7425 if (regs_ever_live
[first_reg
]
7426 && (! call_used_regs
[first_reg
]
7427 || (first_reg
== PIC_OFFSET_TABLE_REGNUM
7428 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
7429 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)))))
7432 if (current_function_profile
)
7434 /* AIX must save/restore every register that contains a parameter
7435 before/after the .__mcount call plus an additional register
7436 for the static chain, if needed; use registers from 30 down to 22
7438 if (DEFAULT_ABI
== ABI_AIX
|| DEFAULT_ABI
== ABI_DARWIN
)
7440 int last_parm_reg
, profile_first_reg
;
7442 /* Figure out last used parameter register. The proper thing
7443 to do is to walk incoming args of the function. A function
7444 might have live parameter registers even if it has no
7446 for (last_parm_reg
= 10;
7447 last_parm_reg
> 2 && ! regs_ever_live
[last_parm_reg
];
7451 /* Calculate first reg for saving parameter registers
7453 Skip reg 31 which may contain the frame pointer. */
7454 profile_first_reg
= (33 - last_parm_reg
7455 - (current_function_needs_context
? 1 : 0));
7457 /* Need to skip another reg to account for R31 being PICBASE
7458 (when flag_pic is set) or R30 being used as the frame
7459 pointer (when flag_pic is not set). */
7460 --profile_first_reg
;
7462 /* Do not save frame pointer if no parameters needs to be saved. */
7463 if (profile_first_reg
== 31)
7464 profile_first_reg
= 32;
7466 if (first_reg
> profile_first_reg
)
7467 first_reg
= profile_first_reg
;
7470 /* SVR4 may need one register to preserve the static chain. */
7471 else if (current_function_needs_context
)
7473 /* Skip reg 31 which may contain the frame pointer. */
7480 if (flag_pic
&& current_function_uses_pic_offset_table
&&
7481 (first_reg
> PIC_OFFSET_TABLE_REGNUM
))
7482 return PIC_OFFSET_TABLE_REGNUM
;
7488 /* Similar, for FP regs. */
7491 first_fp_reg_to_save ()
7495 /* Find lowest numbered live register. */
7496 for (first_reg
= 14 + 32; first_reg
<= 63; first_reg
++)
7497 if (regs_ever_live
[first_reg
])
7503 /* Similar, for AltiVec regs. */
7506 first_altivec_reg_to_save ()
7510 /* Stack frame remains as is unless we are in AltiVec ABI. */
7511 if (! TARGET_ALTIVEC_ABI
)
7512 return LAST_ALTIVEC_REGNO
+ 1;
7514 /* Find lowest numbered live register. */
7515 for (i
= FIRST_ALTIVEC_REGNO
+ 20; i
<= LAST_ALTIVEC_REGNO
; ++i
)
7516 if (regs_ever_live
[i
])
7522 /* Return a 32-bit mask of the AltiVec registers we need to set in
7523 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
7524 the 32-bit word is 0. */
7527 compute_vrsave_mask ()
7529 unsigned int i
, mask
= 0;
7531 /* First, find out if we use _any_ altivec registers. */
7532 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
7533 if (regs_ever_live
[i
])
7534 mask
|= ALTIVEC_REG_BIT (i
);
7539 /* Next, add all registers that are call-clobbered. We do this
7540 because post-reload register optimizers such as regrename_optimize
7541 may choose to use them. They never change the register class
7542 chosen by reload, so cannot create new uses of altivec registers
7543 if there were none before, so the early exit above is safe. */
7544 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
7545 altivec registers not saved in the mask, which might well make the
7546 adjustments below more effective in eliding the save/restore of
7547 VRSAVE in small functions. */
7548 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
7549 if (call_used_regs
[i
])
7550 mask
|= ALTIVEC_REG_BIT (i
);
7552 /* Next, remove the argument registers from the set. These must
7553 be in the VRSAVE mask set by the caller, so we don't need to add
7554 them in again. More importantly, the mask we compute here is
7555 used to generate CLOBBERs in the set_vrsave insn, and we do not
7556 wish the argument registers to die. */
7557 for (i
= cfun
->args_info
.vregno
; i
>= ALTIVEC_ARG_MIN_REG
; --i
)
7558 mask
&= ~ALTIVEC_REG_BIT (i
);
7560 /* Similarly, remove the return value from the set. */
7563 diddle_return_value (is_altivec_return_reg
, &yes
);
7565 mask
&= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN
);
7572 is_altivec_return_reg (reg
, xyes
)
7576 bool *yes
= (bool *) xyes
;
7577 if (REGNO (reg
) == ALTIVEC_ARG_RETURN
)
7582 /* Calculate the stack information for the current function. This is
7583 complicated by having two separate calling sequences, the AIX calling
7584 sequence and the V.4 calling sequence.
7586 AIX (and Darwin/Mac OS X) stack frames look like:
7588 SP----> +---------------------------------------+
7589 | back chain to caller | 0 0
7590 +---------------------------------------+
7591 | saved CR | 4 8 (8-11)
7592 +---------------------------------------+
7594 +---------------------------------------+
7595 | reserved for compilers | 12 24
7596 +---------------------------------------+
7597 | reserved for binders | 16 32
7598 +---------------------------------------+
7599 | saved TOC pointer | 20 40
7600 +---------------------------------------+
7601 | Parameter save area (P) | 24 48
7602 +---------------------------------------+
7603 | Alloca space (A) | 24+P etc.
7604 +---------------------------------------+
7605 | Local variable space (L) | 24+P+A
7606 +---------------------------------------+
7607 | Float/int conversion temporary (X) | 24+P+A+L
7608 +---------------------------------------+
7609 | Save area for AltiVec registers (W) | 24+P+A+L+X
7610 +---------------------------------------+
7611 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
7612 +---------------------------------------+
7613 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
7614 +---------------------------------------+
7615 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
7616 +---------------------------------------+
7617 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
7618 +---------------------------------------+
7619 old SP->| back chain to caller's caller |
7620 +---------------------------------------+
7622 The required alignment for AIX configurations is two words (i.e., 8
7626 V.4 stack frames look like:
7628 SP----> +---------------------------------------+
7629 | back chain to caller | 0
7630 +---------------------------------------+
7631 | caller's saved LR | 4
7632 +---------------------------------------+
7633 | Parameter save area (P) | 8
7634 +---------------------------------------+
7635 | Alloca space (A) | 8+P
7636 +---------------------------------------+
7637 | Varargs save area (V) | 8+P+A
7638 +---------------------------------------+
7639 | Local variable space (L) | 8+P+A+V
7640 +---------------------------------------+
7641 | Float/int conversion temporary (X) | 8+P+A+V+L
7642 +---------------------------------------+
7643 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
7644 +---------------------------------------+
7645 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
7646 +---------------------------------------+
7647 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
7648 +---------------------------------------+
7649 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
7650 +---------------------------------------+
7651 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
7652 +---------------------------------------+
7653 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
7654 +---------------------------------------+
7655 old SP->| back chain to caller's caller |
7656 +---------------------------------------+
7658 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7659 given. (But note below and in sysv4.h that we require only 8 and
7660 may round up the size of our stack frame anyways. The historical
7661 reason is early versions of powerpc-linux which didn't properly
7662 align the stack at program startup. A happy side-effect is that
7663 -mno-eabi libraries can be used with -meabi programs.)
7665 The EABI configuration defaults to the V.4 layout, unless
7666 -mcall-aix is used, in which case the AIX layout is used. However,
7667 the stack alignment requirements may differ. If -mno-eabi is not
7668 given, the required stack alignment is 8 bytes; if -mno-eabi is
7669 given, the required alignment is 16 bytes. (But see V.4 comment
7672 #ifndef ABI_STACK_BOUNDARY
7673 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
7677 rs6000_stack_info ()
7679 static rs6000_stack_t info
, zero_info
;
7680 rs6000_stack_t
*info_ptr
= &info
;
7681 int reg_size
= TARGET_POWERPC64
? 8 : 4;
7682 enum rs6000_abi abi
;
7686 /* Zero all fields portably. */
7689 /* Select which calling sequence. */
7690 info_ptr
->abi
= abi
= DEFAULT_ABI
;
7692 /* Calculate which registers need to be saved & save area size. */
7693 info_ptr
->first_gp_reg_save
= first_reg_to_save ();
7694 /* Assume that we will have to save PIC_OFFSET_TABLE_REGNUM,
7695 even if it currently looks like we won't. */
7696 if (((TARGET_TOC
&& TARGET_MINIMAL_TOC
)
7697 || (flag_pic
== 1 && abi
== ABI_V4
)
7698 || (flag_pic
&& abi
== ABI_DARWIN
))
7699 && info_ptr
->first_gp_reg_save
> PIC_OFFSET_TABLE_REGNUM
)
7700 info_ptr
->gp_size
= reg_size
* (32 - PIC_OFFSET_TABLE_REGNUM
);
7702 info_ptr
->gp_size
= reg_size
* (32 - info_ptr
->first_gp_reg_save
);
7704 info_ptr
->first_fp_reg_save
= first_fp_reg_to_save ();
7705 info_ptr
->fp_size
= 8 * (64 - info_ptr
->first_fp_reg_save
);
7707 info_ptr
->first_altivec_reg_save
= first_altivec_reg_to_save ();
7708 info_ptr
->altivec_size
= 16 * (LAST_ALTIVEC_REGNO
+ 1
7709 - info_ptr
->first_altivec_reg_save
);
7711 /* Does this function call anything? */
7712 info_ptr
->calls_p
= (! current_function_is_leaf
7713 || cfun
->machine
->ra_needs_full_frame
);
7715 /* Determine if we need to save the link register. */
7716 if (rs6000_ra_ever_killed ()
7717 || (DEFAULT_ABI
== ABI_AIX
&& current_function_profile
)
7718 #ifdef TARGET_RELOCATABLE
7719 || (TARGET_RELOCATABLE
&& (get_pool_size () != 0))
7721 || (info_ptr
->first_fp_reg_save
!= 64
7722 && !FP_SAVE_INLINE (info_ptr
->first_fp_reg_save
))
7723 || info_ptr
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
7724 || (abi
== ABI_V4
&& current_function_calls_alloca
)
7725 || (DEFAULT_ABI
== ABI_DARWIN
7727 && current_function_uses_pic_offset_table
)
7728 || info_ptr
->calls_p
)
7730 info_ptr
->lr_save_p
= 1;
7731 regs_ever_live
[LINK_REGISTER_REGNUM
] = 1;
7734 /* Determine if we need to save the condition code registers. */
7735 if (regs_ever_live
[CR2_REGNO
]
7736 || regs_ever_live
[CR3_REGNO
]
7737 || regs_ever_live
[CR4_REGNO
])
7739 info_ptr
->cr_save_p
= 1;
7741 info_ptr
->cr_size
= reg_size
;
7744 /* If the current function calls __builtin_eh_return, then we need
7745 to allocate stack space for registers that will hold data for
7746 the exception handler. */
7747 if (current_function_calls_eh_return
)
7750 for (i
= 0; EH_RETURN_DATA_REGNO (i
) != INVALID_REGNUM
; ++i
)
7752 ehrd_size
= i
* UNITS_PER_WORD
;
7757 /* Determine various sizes. */
7758 info_ptr
->reg_size
= reg_size
;
7759 info_ptr
->fixed_size
= RS6000_SAVE_AREA
;
7760 info_ptr
->varargs_size
= RS6000_VARARGS_AREA
;
7761 info_ptr
->vars_size
= RS6000_ALIGN (get_frame_size (), 8);
7762 info_ptr
->parm_size
= RS6000_ALIGN (current_function_outgoing_args_size
,
7765 if (TARGET_ALTIVEC_ABI
)
7767 info_ptr
->vrsave_mask
= compute_vrsave_mask ();
7768 info_ptr
->vrsave_size
= info_ptr
->vrsave_mask
? 4 : 0;
7772 info_ptr
->vrsave_mask
= 0;
7773 info_ptr
->vrsave_size
= 0;
7776 /* Calculate the offsets. */
7784 case ABI_AIX_NODESC
:
7786 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
7787 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
7789 if (TARGET_ALTIVEC_ABI
)
7791 info_ptr
->vrsave_save_offset
7792 = info_ptr
->gp_save_offset
- info_ptr
->vrsave_size
;
7794 /* Align stack so vector save area is on a quadword boundary. */
7795 if (info_ptr
->altivec_size
!= 0)
7796 info_ptr
->altivec_padding_size
7797 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
7799 info_ptr
->altivec_padding_size
= 0;
7801 info_ptr
->altivec_save_offset
7802 = info_ptr
->vrsave_save_offset
7803 - info_ptr
->altivec_padding_size
7804 - info_ptr
->altivec_size
;
7806 /* Adjust for AltiVec case. */
7807 info_ptr
->ehrd_offset
= info_ptr
->altivec_save_offset
- ehrd_size
;
7810 info_ptr
->ehrd_offset
= info_ptr
->gp_save_offset
- ehrd_size
;
7811 info_ptr
->cr_save_offset
= reg_size
; /* first word when 64-bit. */
7812 info_ptr
->lr_save_offset
= 2*reg_size
;
7816 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
7817 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
7818 info_ptr
->cr_save_offset
= info_ptr
->gp_save_offset
- info_ptr
->cr_size
;
7820 if (TARGET_ALTIVEC_ABI
)
7822 info_ptr
->vrsave_save_offset
7823 = info_ptr
->cr_save_offset
- info_ptr
->vrsave_size
;
7825 /* Align stack so vector save area is on a quadword boundary. */
7826 if (info_ptr
->altivec_size
!= 0)
7827 info_ptr
->altivec_padding_size
7828 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
7830 info_ptr
->altivec_padding_size
= 0;
7832 info_ptr
->altivec_save_offset
7833 = info_ptr
->vrsave_save_offset
7834 - info_ptr
->altivec_padding_size
7835 - info_ptr
->altivec_size
;
7837 /* Adjust for AltiVec case. */
7838 info_ptr
->toc_save_offset
7839 = info_ptr
->altivec_save_offset
- info_ptr
->toc_size
;
7842 info_ptr
->toc_save_offset
= info_ptr
->cr_save_offset
- info_ptr
->toc_size
;
7843 info_ptr
->ehrd_offset
= info_ptr
->toc_save_offset
- ehrd_size
;
7844 info_ptr
->lr_save_offset
= reg_size
;
7848 info_ptr
->save_size
= RS6000_ALIGN (info_ptr
->fp_size
7850 + info_ptr
->altivec_size
7851 + info_ptr
->altivec_padding_size
7852 + info_ptr
->vrsave_size
7856 + info_ptr
->vrsave_size
7857 + info_ptr
->toc_size
,
7858 (TARGET_ALTIVEC_ABI
|| ABI_DARWIN
)
7861 total_raw_size
= (info_ptr
->vars_size
7862 + info_ptr
->parm_size
7863 + info_ptr
->save_size
7864 + info_ptr
->varargs_size
7865 + info_ptr
->fixed_size
);
7867 info_ptr
->total_size
=
7868 RS6000_ALIGN (total_raw_size
, ABI_STACK_BOUNDARY
/ BITS_PER_UNIT
);
7870 /* Determine if we need to allocate any stack frame:
7872 For AIX we need to push the stack if a frame pointer is needed
7873 (because the stack might be dynamically adjusted), if we are
7874 debugging, if we make calls, or if the sum of fp_save, gp_save,
7875 and local variables are more than the space needed to save all
7876 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
7877 + 18*8 = 288 (GPR13 reserved).
7879 For V.4 we don't have the stack cushion that AIX uses, but assume
7880 that the debugger can handle stackless frames. */
7882 if (info_ptr
->calls_p
)
7883 info_ptr
->push_p
= 1;
7885 else if (abi
== ABI_V4
)
7886 info_ptr
->push_p
= total_raw_size
> info_ptr
->fixed_size
;
7889 info_ptr
->push_p
= (frame_pointer_needed
7890 || (abi
!= ABI_DARWIN
&& write_symbols
!= NO_DEBUG
)
7891 || ((total_raw_size
- info_ptr
->fixed_size
)
7892 > (TARGET_32BIT
? 220 : 288)));
7894 /* Zero offsets if we're not saving those registers. */
7895 if (info_ptr
->fp_size
== 0)
7896 info_ptr
->fp_save_offset
= 0;
7898 if (info_ptr
->gp_size
== 0)
7899 info_ptr
->gp_save_offset
= 0;
7901 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->altivec_size
== 0)
7902 info_ptr
->altivec_save_offset
= 0;
7904 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->vrsave_mask
== 0)
7905 info_ptr
->vrsave_save_offset
= 0;
7907 if (! info_ptr
->lr_save_p
)
7908 info_ptr
->lr_save_offset
= 0;
7910 if (! info_ptr
->cr_save_p
)
7911 info_ptr
->cr_save_offset
= 0;
7913 if (! info_ptr
->toc_save_p
)
7914 info_ptr
->toc_save_offset
= 0;
7920 debug_stack_info (info
)
7921 rs6000_stack_t
*info
;
7923 const char *abi_string
;
7926 info
= rs6000_stack_info ();
7928 fprintf (stderr
, "\nStack information for function %s:\n",
7929 ((current_function_decl
&& DECL_NAME (current_function_decl
))
7930 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl
))
7935 default: abi_string
= "Unknown"; break;
7936 case ABI_NONE
: abi_string
= "NONE"; break;
7938 case ABI_AIX_NODESC
: abi_string
= "AIX"; break;
7939 case ABI_DARWIN
: abi_string
= "Darwin"; break;
7940 case ABI_V4
: abi_string
= "V.4"; break;
7943 fprintf (stderr
, "\tABI = %5s\n", abi_string
);
7945 if (TARGET_ALTIVEC_ABI
)
7946 fprintf (stderr
, "\tALTIVEC ABI extensions enabled.\n");
7948 if (info
->first_gp_reg_save
!= 32)
7949 fprintf (stderr
, "\tfirst_gp_reg_save = %5d\n", info
->first_gp_reg_save
);
7951 if (info
->first_fp_reg_save
!= 64)
7952 fprintf (stderr
, "\tfirst_fp_reg_save = %5d\n", info
->first_fp_reg_save
);
7954 if (info
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
)
7955 fprintf (stderr
, "\tfirst_altivec_reg_save = %5d\n",
7956 info
->first_altivec_reg_save
);
7958 if (info
->lr_save_p
)
7959 fprintf (stderr
, "\tlr_save_p = %5d\n", info
->lr_save_p
);
7961 if (info
->cr_save_p
)
7962 fprintf (stderr
, "\tcr_save_p = %5d\n", info
->cr_save_p
);
7964 if (info
->toc_save_p
)
7965 fprintf (stderr
, "\ttoc_save_p = %5d\n", info
->toc_save_p
);
7967 if (info
->vrsave_mask
)
7968 fprintf (stderr
, "\tvrsave_mask = 0x%x\n", info
->vrsave_mask
);
7971 fprintf (stderr
, "\tpush_p = %5d\n", info
->push_p
);
7974 fprintf (stderr
, "\tcalls_p = %5d\n", info
->calls_p
);
7976 if (info
->gp_save_offset
)
7977 fprintf (stderr
, "\tgp_save_offset = %5d\n", info
->gp_save_offset
);
7979 if (info
->fp_save_offset
)
7980 fprintf (stderr
, "\tfp_save_offset = %5d\n", info
->fp_save_offset
);
7982 if (info
->altivec_save_offset
)
7983 fprintf (stderr
, "\taltivec_save_offset = %5d\n",
7984 info
->altivec_save_offset
);
7986 if (info
->vrsave_save_offset
)
7987 fprintf (stderr
, "\tvrsave_save_offset = %5d\n",
7988 info
->vrsave_save_offset
);
7990 if (info
->lr_save_offset
)
7991 fprintf (stderr
, "\tlr_save_offset = %5d\n", info
->lr_save_offset
);
7993 if (info
->cr_save_offset
)
7994 fprintf (stderr
, "\tcr_save_offset = %5d\n", info
->cr_save_offset
);
7996 if (info
->toc_save_offset
)
7997 fprintf (stderr
, "\ttoc_save_offset = %5d\n", info
->toc_save_offset
);
7999 if (info
->varargs_save_offset
)
8000 fprintf (stderr
, "\tvarargs_save_offset = %5d\n", info
->varargs_save_offset
);
8002 if (info
->total_size
)
8003 fprintf (stderr
, "\ttotal_size = %5d\n", info
->total_size
);
8005 if (info
->varargs_size
)
8006 fprintf (stderr
, "\tvarargs_size = %5d\n", info
->varargs_size
);
8008 if (info
->vars_size
)
8009 fprintf (stderr
, "\tvars_size = %5d\n", info
->vars_size
);
8011 if (info
->parm_size
)
8012 fprintf (stderr
, "\tparm_size = %5d\n", info
->parm_size
);
8014 if (info
->fixed_size
)
8015 fprintf (stderr
, "\tfixed_size = %5d\n", info
->fixed_size
);
8018 fprintf (stderr
, "\tgp_size = %5d\n", info
->gp_size
);
8021 fprintf (stderr
, "\tfp_size = %5d\n", info
->fp_size
);
8023 if (info
->altivec_size
)
8024 fprintf (stderr
, "\taltivec_size = %5d\n", info
->altivec_size
);
8026 if (info
->vrsave_size
)
8027 fprintf (stderr
, "\tvrsave_size = %5d\n", info
->vrsave_size
);
8029 if (info
->altivec_padding_size
)
8030 fprintf (stderr
, "\taltivec_padding_size= %5d\n",
8031 info
->altivec_padding_size
);
8034 fprintf (stderr
, "\tlr_size = %5d\n", info
->lr_size
);
8037 fprintf (stderr
, "\tcr_size = %5d\n", info
->cr_size
);
8040 fprintf (stderr
, "\ttoc_size = %5d\n", info
->toc_size
);
8042 if (info
->save_size
)
8043 fprintf (stderr
, "\tsave_size = %5d\n", info
->save_size
);
8045 if (info
->reg_size
!= 4)
8046 fprintf (stderr
, "\treg_size = %5d\n", info
->reg_size
);
8048 fprintf (stderr
, "\n");
8052 rs6000_return_addr (count
, frame
)
8056 /* Currently we don't optimize very well between prolog and body
8057 code and for PIC code the code can be actually quite bad, so
8058 don't try to be too clever here. */
8061 || DEFAULT_ABI
== ABI_AIX
8062 || DEFAULT_ABI
== ABI_AIX_NODESC
)
8064 cfun
->machine
->ra_needs_full_frame
= 1;
8071 plus_constant (copy_to_reg
8072 (gen_rtx_MEM (Pmode
,
8073 memory_address (Pmode
, frame
))),
8074 RETURN_ADDRESS_OFFSET
)));
8077 return get_hard_reg_initial_val (Pmode
, LINK_REGISTER_REGNUM
);
8081 rs6000_ra_ever_killed ()
8085 #ifdef ASM_OUTPUT_MI_THUNK
8086 if (current_function_is_thunk
)
8089 if (!has_hard_reg_initial_val (Pmode
, LINK_REGISTER_REGNUM
)
8090 || cfun
->machine
->ra_needs_full_frame
)
8091 return regs_ever_live
[LINK_REGISTER_REGNUM
];
8093 push_topmost_sequence ();
8095 pop_topmost_sequence ();
8097 return reg_set_between_p (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
8101 /* Add a REG_MAYBE_DEAD note to the insn. */
8103 rs6000_maybe_dead (insn
)
8106 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
,
8111 /* Emit instructions needed to load the TOC register.
8112 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
8113 a constant pool; or for SVR4 -fpic. */
8116 rs6000_emit_load_toc_table (fromprolog
)
8120 dest
= gen_rtx_REG (Pmode
, PIC_OFFSET_TABLE_REGNUM
);
8122 if (TARGET_ELF
&& DEFAULT_ABI
!= ABI_AIX
)
8124 if (DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
8126 rtx temp
= (fromprolog
8127 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
8128 : gen_reg_rtx (Pmode
));
8129 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp
)));
8130 rs6000_maybe_dead (emit_move_insn (dest
, temp
));
8132 else if (flag_pic
== 2)
8135 rtx tempLR
= (fromprolog
8136 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
8137 : gen_reg_rtx (Pmode
));
8138 rtx temp0
= (fromprolog
8139 ? gen_rtx_REG (Pmode
, 0)
8140 : gen_reg_rtx (Pmode
));
8143 /* possibly create the toc section */
8144 if (! toc_initialized
)
8147 function_section (current_function_decl
);
8154 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
8155 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
8157 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCL", rs6000_pic_labelno
);
8158 symL
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
8160 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR
,
8162 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
8163 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0
, dest
,
8170 static int reload_toc_labelno
= 0;
8172 tocsym
= gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
);
8174 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCG", reload_toc_labelno
++);
8175 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
8177 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR
,
8180 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
8181 rs6000_maybe_dead (emit_move_insn (temp0
,
8182 gen_rtx_MEM (Pmode
, dest
)));
8184 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest
, temp0
, dest
)));
8186 else if (flag_pic
== 0 && TARGET_MINIMAL_TOC
)
8188 /* This is for AIX code running in non-PIC ELF. */
8191 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCTOC", 1);
8192 realsym
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
8194 rs6000_maybe_dead (emit_insn (gen_elf_high (dest
, realsym
)));
8195 rs6000_maybe_dead (emit_insn (gen_elf_low (dest
, dest
, realsym
)));
8203 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest
)));
8205 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest
)));
8210 get_TOC_alias_set ()
8212 static int set
= -1;
8214 set
= new_alias_set ();
8218 /* This retuns nonzero if the current function uses the TOC. This is
8219 determined by the presence of (unspec ... 7), which is generated by
8220 the various load_toc_* patterns. */
8227 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
8230 rtx pat
= PATTERN (insn
);
8233 if (GET_CODE (pat
) == PARALLEL
)
8234 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
8235 if (GET_CODE (XVECEXP (PATTERN (insn
), 0, i
)) == UNSPEC
8236 && XINT (XVECEXP (PATTERN (insn
), 0, i
), 1) == 7)
8243 create_TOC_reference (symbol
)
8246 return gen_rtx_PLUS (Pmode
,
8247 gen_rtx_REG (Pmode
, TOC_REGISTER
),
8248 gen_rtx_CONST (Pmode
,
8249 gen_rtx_MINUS (Pmode
, symbol
,
8250 gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
))));
8254 /* __throw will restore its own return address to be the same as the
8255 return address of the function that the throw is being made to.
8256 This is unfortunate, because we want to check the original
8257 return address to see if we need to restore the TOC.
8258 So we have to squirrel it away here.
8259 This is used only in compiling __throw and __rethrow.
8261 Most of this code should be removed by CSE. */
8262 static rtx insn_after_throw
;
8264 /* This does the saving... */
8266 rs6000_aix_emit_builtin_unwind_init ()
8269 rtx stack_top
= gen_reg_rtx (Pmode
);
8270 rtx opcode_addr
= gen_reg_rtx (Pmode
);
8272 insn_after_throw
= gen_reg_rtx (SImode
);
8274 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
8275 emit_move_insn (stack_top
, mem
);
8277 mem
= gen_rtx_MEM (Pmode
,
8278 gen_rtx_PLUS (Pmode
, stack_top
,
8279 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
8280 emit_move_insn (opcode_addr
, mem
);
8281 emit_move_insn (insn_after_throw
, gen_rtx_MEM (SImode
, opcode_addr
));
8284 /* Emit insns to _restore_ the TOC register, at runtime (specifically
8285 in _eh.o). Only used on AIX.
8287 The idea is that on AIX, function calls look like this:
8288 bl somefunction-trampoline
8292 somefunction-trampoline:
8294 ... load function address in the count register ...
8296 or like this, if the linker determines that this is not a cross-module call
8297 and so the TOC need not be restored:
8300 or like this, if the compiler could determine that this is not a
8303 now, the tricky bit here is that register 2 is saved and restored
8304 by the _linker_, so we can't readily generate debugging information
8305 for it. So we need to go back up the call chain looking at the
8306 insns at return addresses to see which calls saved the TOC register
8307 and so see where it gets restored from.
8309 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
8310 just before the actual epilogue.
8312 On the bright side, this incurs no space or time overhead unless an
8313 exception is thrown, except for the extra code in libgcc.a.
8315 The parameter STACKSIZE is a register containing (at runtime)
8316 the amount to be popped off the stack in addition to the stack frame
8317 of this routine (which will be __throw or __rethrow, and so is
8318 guaranteed to have a stack frame). */
8321 rs6000_emit_eh_toc_restore (stacksize
)
8325 rtx bottom_of_stack
= gen_reg_rtx (Pmode
);
8326 rtx tocompare
= gen_reg_rtx (SImode
);
8327 rtx opcode
= gen_reg_rtx (SImode
);
8328 rtx opcode_addr
= gen_reg_rtx (Pmode
);
8330 rtx loop_start
= gen_label_rtx ();
8331 rtx no_toc_restore_needed
= gen_label_rtx ();
8332 rtx loop_exit
= gen_label_rtx ();
8334 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
8335 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
8336 emit_move_insn (bottom_of_stack
, mem
);
8338 top_of_stack
= expand_binop (Pmode
, add_optab
,
8339 bottom_of_stack
, stacksize
,
8340 NULL_RTX
, 1, OPTAB_WIDEN
);
8342 emit_move_insn (tocompare
,
8343 GEN_INT (trunc_int_for_mode (TARGET_32BIT
8345 : 0xE8410028, SImode
)));
8347 if (insn_after_throw
== NULL_RTX
)
8349 emit_move_insn (opcode
, insn_after_throw
);
8351 emit_note (NULL
, NOTE_INSN_LOOP_BEG
);
8352 emit_label (loop_start
);
8354 do_compare_rtx_and_jump (opcode
, tocompare
, NE
, 1,
8355 SImode
, NULL_RTX
, NULL_RTX
,
8356 no_toc_restore_needed
);
8358 mem
= gen_rtx_MEM (Pmode
,
8359 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
8360 GEN_INT (5 * GET_MODE_SIZE (Pmode
))));
8361 emit_move_insn (gen_rtx_REG (Pmode
, 2), mem
);
8363 emit_label (no_toc_restore_needed
);
8364 do_compare_rtx_and_jump (top_of_stack
, bottom_of_stack
, EQ
, 1,
8365 Pmode
, NULL_RTX
, NULL_RTX
,
8368 mem
= gen_rtx_MEM (Pmode
, bottom_of_stack
);
8369 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
8370 emit_move_insn (bottom_of_stack
, mem
);
8372 mem
= gen_rtx_MEM (Pmode
,
8373 gen_rtx_PLUS (Pmode
, bottom_of_stack
,
8374 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
8375 emit_move_insn (opcode_addr
, mem
);
8376 emit_move_insn (opcode
, gen_rtx_MEM (SImode
, opcode_addr
));
8378 emit_note (NULL
, NOTE_INSN_LOOP_CONT
);
8379 emit_jump (loop_start
);
8380 emit_note (NULL
, NOTE_INSN_LOOP_END
);
8381 emit_label (loop_exit
);
8383 #endif /* TARGET_AIX */
8385 /* This ties together stack memory (MEM with an alias set of
8386 rs6000_sr_alias_set) and the change to the stack pointer. */
8389 rs6000_emit_stack_tie ()
8391 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
));
8393 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
8394 emit_insn (gen_stack_tie (mem
));
8397 /* Emit the correct code for allocating stack space, as insns.
8398 If COPY_R12, make sure a copy of the old frame is left in r12.
8399 The generated code may use hard register 0 as a temporary. */
8402 rs6000_emit_allocate_stack (size
, copy_r12
)
8407 rtx stack_reg
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
8408 rtx tmp_reg
= gen_rtx_REG (Pmode
, 0);
8409 rtx todec
= GEN_INT (-size
);
8411 if (current_function_limit_stack
)
8413 if (REG_P (stack_limit_rtx
)
8414 && REGNO (stack_limit_rtx
) > 1
8415 && REGNO (stack_limit_rtx
) <= 31)
8417 emit_insn (Pmode
== SImode
8418 ? gen_addsi3 (tmp_reg
,
8421 : gen_adddi3 (tmp_reg
,
8425 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
8428 else if (GET_CODE (stack_limit_rtx
) == SYMBOL_REF
8430 && DEFAULT_ABI
== ABI_V4
)
8432 rtx toload
= gen_rtx_CONST (VOIDmode
,
8433 gen_rtx_PLUS (Pmode
,
8437 emit_insn (gen_elf_high (tmp_reg
, toload
));
8438 emit_insn (gen_elf_low (tmp_reg
, tmp_reg
, toload
));
8439 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
8443 warning ("stack limit expression is not supported");
8446 if (copy_r12
|| ! TARGET_UPDATE
)
8447 emit_move_insn (gen_rtx_REG (Pmode
, 12), stack_reg
);
8453 /* Need a note here so that try_split doesn't get confused. */
8454 if (get_last_insn() == NULL_RTX
)
8455 emit_note (0, NOTE_INSN_DELETED
);
8456 insn
= emit_move_insn (tmp_reg
, todec
);
8457 try_split (PATTERN (insn
), insn
, 0);
8461 if (Pmode
== SImode
)
8462 insn
= emit_insn (gen_movsi_update (stack_reg
, stack_reg
,
8465 insn
= emit_insn (gen_movdi_update (stack_reg
, stack_reg
,
8470 if (Pmode
== SImode
)
8471 insn
= emit_insn (gen_addsi3 (stack_reg
, stack_reg
, todec
));
8473 insn
= emit_insn (gen_adddi3 (stack_reg
, stack_reg
, todec
));
8474 emit_move_insn (gen_rtx_MEM (Pmode
, stack_reg
),
8475 gen_rtx_REG (Pmode
, 12));
8478 RTX_FRAME_RELATED_P (insn
) = 1;
8480 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
8481 gen_rtx_SET (VOIDmode
, stack_reg
,
8482 gen_rtx_PLUS (Pmode
, stack_reg
,
8487 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
8490 (mem (plus (blah) (regXX)))
8494 (mem (plus (blah) (const VALUE_OF_REGXX))). */
8497 altivec_frame_fixup (insn
, reg
, val
)
8503 real
= copy_rtx (PATTERN (insn
));
8505 real
= replace_rtx (real
, reg
, GEN_INT (val
));
8507 RTX_FRAME_RELATED_P (insn
) = 1;
8508 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
8513 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
8514 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
8515 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
8516 deduce these equivalences by itself so it wasn't necessary to hold
8517 its hand so much. */
8520 rs6000_frame_related (insn
, reg
, val
, reg2
, rreg
)
8529 real
= copy_rtx (PATTERN (insn
));
8531 real
= replace_rtx (real
, reg
,
8532 gen_rtx_PLUS (Pmode
, gen_rtx_REG (Pmode
,
8533 STACK_POINTER_REGNUM
),
8536 /* We expect that 'real' is either a SET or a PARALLEL containing
8537 SETs (and possibly other stuff). In a PARALLEL, all the SETs
8538 are important so they all have to be marked RTX_FRAME_RELATED_P. */
8540 if (GET_CODE (real
) == SET
)
8544 temp
= simplify_rtx (SET_SRC (set
));
8546 SET_SRC (set
) = temp
;
8547 temp
= simplify_rtx (SET_DEST (set
));
8549 SET_DEST (set
) = temp
;
8550 if (GET_CODE (SET_DEST (set
)) == MEM
)
8552 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
8554 XEXP (SET_DEST (set
), 0) = temp
;
8557 else if (GET_CODE (real
) == PARALLEL
)
8560 for (i
= 0; i
< XVECLEN (real
, 0); i
++)
8561 if (GET_CODE (XVECEXP (real
, 0, i
)) == SET
)
8563 rtx set
= XVECEXP (real
, 0, i
);
8565 temp
= simplify_rtx (SET_SRC (set
));
8567 SET_SRC (set
) = temp
;
8568 temp
= simplify_rtx (SET_DEST (set
));
8570 SET_DEST (set
) = temp
;
8571 if (GET_CODE (SET_DEST (set
)) == MEM
)
8573 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
8575 XEXP (SET_DEST (set
), 0) = temp
;
8577 RTX_FRAME_RELATED_P (set
) = 1;
8583 if (reg2
!= NULL_RTX
)
8584 real
= replace_rtx (real
, reg2
, rreg
);
8586 RTX_FRAME_RELATED_P (insn
) = 1;
8587 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
8592 /* Returns an insn that has a vrsave set operation with the
8593 appropriate CLOBBERs. */
8596 generate_set_vrsave (reg
, info
, epiloguep
)
8598 rs6000_stack_t
*info
;
8602 rtx insn
, clobs
[TOTAL_ALTIVEC_REGS
+ 1];
8603 rtx vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
8606 = gen_rtx_SET (VOIDmode
,
8608 gen_rtx_UNSPEC_VOLATILE (SImode
,
8609 gen_rtvec (2, reg
, vrsave
),
8614 /* We need to clobber the registers in the mask so the scheduler
8615 does not move sets to VRSAVE before sets of AltiVec registers.
8617 However, if the function receives nonlocal gotos, reload will set
8618 all call saved registers live. We will end up with:
8620 (set (reg 999) (mem))
8621 (parallel [ (set (reg vrsave) (unspec blah))
8622 (clobber (reg 999))])
8624 The clobber will cause the store into reg 999 to be dead, and
8625 flow will attempt to delete an epilogue insn. In this case, we
8626 need an unspec use/set of the register. */
8628 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
8629 if (info
->vrsave_mask
!= 0 && ALTIVEC_REG_BIT (i
) != 0)
8631 if (!epiloguep
|| call_used_regs
[i
])
8632 clobs
[nclobs
++] = gen_rtx_CLOBBER (VOIDmode
,
8633 gen_rtx_REG (V4SImode
, i
));
8636 rtx reg
= gen_rtx_REG (V4SImode
, i
);
8639 = gen_rtx_SET (VOIDmode
,
8641 gen_rtx_UNSPEC (V4SImode
,
8642 gen_rtvec (1, reg
), 27));
8646 insn
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nclobs
));
8648 for (i
= 0; i
< nclobs
; ++i
)
8649 XVECEXP (insn
, 0, i
) = clobs
[i
];
8654 /* Emit function prologue as insns. */
8657 rs6000_emit_prologue ()
8659 rs6000_stack_t
*info
= rs6000_stack_info ();
8660 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
8661 int reg_size
= TARGET_POWERPC64
? 8 : 4;
8662 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
8663 rtx frame_ptr_rtx
= gen_rtx_REG (Pmode
, 12);
8664 rtx frame_reg_rtx
= sp_reg_rtx
;
8665 rtx cr_save_rtx
= NULL
;
8667 int saving_FPRs_inline
;
8668 int using_store_multiple
;
8669 HOST_WIDE_INT sp_offset
= 0;
8671 using_store_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
8672 && info
->first_gp_reg_save
< 31);
8673 saving_FPRs_inline
= (info
->first_fp_reg_save
== 64
8674 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
8676 /* For V.4, update stack before we do any saving and set back pointer. */
8677 if (info
->push_p
&& DEFAULT_ABI
== ABI_V4
)
8679 if (info
->total_size
< 32767)
8680 sp_offset
= info
->total_size
;
8682 frame_reg_rtx
= frame_ptr_rtx
;
8683 rs6000_emit_allocate_stack (info
->total_size
,
8684 (frame_reg_rtx
!= sp_reg_rtx
8687 || info
->first_fp_reg_save
< 64
8688 || info
->first_gp_reg_save
< 32
8690 if (frame_reg_rtx
!= sp_reg_rtx
)
8691 rs6000_emit_stack_tie ();
8694 /* Save AltiVec registers if needed. */
8695 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
8699 /* There should be a non inline version of this, for when we
8700 are saving lots of vector registers. */
8701 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
8702 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
8704 rtx areg
, savereg
, mem
;
8707 offset
= info
->altivec_save_offset
+ sp_offset
8708 + 16 * (i
- info
->first_altivec_reg_save
);
8710 savereg
= gen_rtx_REG (V4SImode
, i
);
8712 areg
= gen_rtx_REG (Pmode
, 0);
8713 emit_move_insn (areg
, GEN_INT (offset
));
8715 /* AltiVec addressing mode is [reg+reg]. */
8716 mem
= gen_rtx_MEM (V4SImode
,
8717 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
));
8719 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
8721 insn
= emit_move_insn (mem
, savereg
);
8723 altivec_frame_fixup (insn
, areg
, offset
);
8727 /* VRSAVE is a bit vector representing which AltiVec registers
8728 are used. The OS uses this to determine which vector
8729 registers to save on a context switch. We need to save
8730 VRSAVE on the stack frame, add whatever AltiVec registers we
8731 used in this function, and do the corresponding magic in the
8734 if (TARGET_ALTIVEC
&& info
->vrsave_mask
!= 0)
8736 rtx reg
, mem
, vrsave
;
8739 /* Get VRSAVE onto a GPR. */
8740 reg
= gen_rtx_REG (SImode
, 12);
8741 vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
8743 emit_insn (gen_get_vrsave_internal (reg
));
8745 emit_insn (gen_rtx_SET (VOIDmode
, reg
, vrsave
));
8748 offset
= info
->vrsave_save_offset
+ sp_offset
;
8750 = gen_rtx_MEM (SImode
,
8751 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, GEN_INT (offset
)));
8752 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
8753 insn
= emit_move_insn (mem
, reg
);
8755 /* Include the registers in the mask. */
8756 emit_insn (gen_iorsi3 (reg
, reg
, GEN_INT ((int) info
->vrsave_mask
)));
8758 insn
= emit_insn (generate_set_vrsave (reg
, info
, 0));
8761 /* If we use the link register, get it into r0. */
8762 if (info
->lr_save_p
)
8763 emit_move_insn (gen_rtx_REG (Pmode
, 0),
8764 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
8766 /* If we need to save CR, put it into r12. */
8767 if (info
->cr_save_p
&& frame_reg_rtx
!= frame_ptr_rtx
)
8769 cr_save_rtx
= gen_rtx_REG (SImode
, 12);
8770 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
8773 /* Do any required saving of fpr's. If only one or two to save, do
8774 it ourselves. Otherwise, call function. */
8775 if (saving_FPRs_inline
)
8778 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
8779 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
8780 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
8783 reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
8784 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
8785 GEN_INT (info
->fp_save_offset
8788 mem
= gen_rtx_MEM (DFmode
, addr
);
8789 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
8791 insn
= emit_move_insn (mem
, reg
);
8792 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
8793 NULL_RTX
, NULL_RTX
);
8796 else if (info
->first_fp_reg_save
!= 64)
8800 const char *alloc_rname
;
8802 p
= rtvec_alloc (2 + 64 - info
->first_fp_reg_save
);
8804 RTVEC_ELT (p
, 0) = gen_rtx_CLOBBER (VOIDmode
,
8806 LINK_REGISTER_REGNUM
));
8807 sprintf (rname
, "%s%d%s", SAVE_FP_PREFIX
,
8808 info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
);
8809 alloc_rname
= ggc_strdup (rname
);
8810 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
8811 gen_rtx_SYMBOL_REF (Pmode
,
8813 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
8816 reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
8817 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
8818 GEN_INT (info
->fp_save_offset
8819 + sp_offset
+ 8*i
));
8820 mem
= gen_rtx_MEM (DFmode
, addr
);
8821 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
8823 RTVEC_ELT (p
, i
+ 2) = gen_rtx_SET (VOIDmode
, mem
, reg
);
8825 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
8826 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
8827 NULL_RTX
, NULL_RTX
);
8830 /* Save GPRs. This is done as a PARALLEL if we are using
8831 the store-multiple instructions. */
8832 if (using_store_multiple
)
8836 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
8837 dwarfp
= rtvec_alloc (32 - info
->first_gp_reg_save
);
8838 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
8841 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
8842 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
8843 GEN_INT (info
->gp_save_offset
8846 mem
= gen_rtx_MEM (reg_mode
, addr
);
8847 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
8849 RTVEC_ELT (p
, i
) = gen_rtx_SET (VOIDmode
, mem
, reg
);
8851 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
8852 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
8853 NULL_RTX
, NULL_RTX
);
8858 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
8859 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
8860 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
8861 || (i
+info
->first_gp_reg_save
== PIC_OFFSET_TABLE_REGNUM
8862 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
8863 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
8866 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
8867 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
8868 GEN_INT (info
->gp_save_offset
8871 mem
= gen_rtx_MEM (reg_mode
, addr
);
8872 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
8874 insn
= emit_move_insn (mem
, reg
);
8875 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
8876 NULL_RTX
, NULL_RTX
);
8880 /* ??? There's no need to emit actual instructions here, but it's the
8881 easiest way to get the frame unwind information emitted. */
8882 if (current_function_calls_eh_return
)
8884 unsigned int i
, regno
;
8890 regno
= EH_RETURN_DATA_REGNO (i
);
8891 if (regno
== INVALID_REGNUM
)
8894 reg
= gen_rtx_REG (reg_mode
, regno
);
8895 addr
= plus_constant (frame_reg_rtx
,
8896 info
->ehrd_offset
+ sp_offset
8897 + reg_size
* (int) i
);
8898 mem
= gen_rtx_MEM (reg_mode
, addr
);
8899 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
8901 insn
= emit_move_insn (mem
, reg
);
8902 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
8903 NULL_RTX
, NULL_RTX
);
8907 /* Save lr if we used it. */
8908 if (info
->lr_save_p
)
8910 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
8911 GEN_INT (info
->lr_save_offset
+ sp_offset
));
8912 rtx reg
= gen_rtx_REG (Pmode
, 0);
8913 rtx mem
= gen_rtx_MEM (Pmode
, addr
);
8914 /* This should not be of rs6000_sr_alias_set, because of
8915 __builtin_return_address. */
8917 insn
= emit_move_insn (mem
, reg
);
8918 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
8919 reg
, gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
8922 /* Save CR if we use any that must be preserved. */
8923 if (info
->cr_save_p
)
8925 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
8926 GEN_INT (info
->cr_save_offset
+ sp_offset
));
8927 rtx mem
= gen_rtx_MEM (SImode
, addr
);
8929 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
8931 /* If r12 was used to hold the original sp, copy cr into r0 now
8933 if (REGNO (frame_reg_rtx
) == 12)
8935 cr_save_rtx
= gen_rtx_REG (SImode
, 0);
8936 emit_insn (gen_movesi_from_cr (cr_save_rtx
));
8938 insn
= emit_move_insn (mem
, cr_save_rtx
);
8940 /* Now, there's no way that dwarf2out_frame_debug_expr is going
8941 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
8942 OK. All we have to do is specify that _one_ condition code
8943 register is saved in this stack slot. The thrower's epilogue
8944 will then restore all the call-saved registers.
8945 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
8946 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
8947 cr_save_rtx
, gen_rtx_REG (SImode
, CR2_REGNO
));
8950 /* Update stack and set back pointer unless this is V.4,
8951 for which it was done previously. */
8952 if (info
->push_p
&& DEFAULT_ABI
!= ABI_V4
)
8953 rs6000_emit_allocate_stack (info
->total_size
, FALSE
);
8955 /* Set frame pointer, if needed. */
8956 if (frame_pointer_needed
)
8958 insn
= emit_move_insn (gen_rtx_REG (reg_mode
, FRAME_POINTER_REGNUM
),
8960 RTX_FRAME_RELATED_P (insn
) = 1;
8963 /* If we are using PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
8964 if ((TARGET_TOC
&& TARGET_MINIMAL_TOC
&& get_pool_size () != 0)
8965 || (DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1
8966 && regs_ever_live
[PIC_OFFSET_TABLE_REGNUM
]))
8968 /* If emit_load_toc_table will use the link register, we need to save
8969 it. We use R11 for this purpose because emit_load_toc_table
8970 can use register 0. This allows us to use a plain 'blr' to return
8971 from the procedure more often. */
8972 int save_LR_around_toc_setup
= (TARGET_ELF
&& flag_pic
!= 0
8973 && ! info
->lr_save_p
8974 && EXIT_BLOCK_PTR
->pred
!= NULL
);
8975 if (save_LR_around_toc_setup
)
8976 emit_move_insn (gen_rtx_REG (Pmode
, 11),
8977 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
8979 rs6000_emit_load_toc_table (TRUE
);
8981 if (save_LR_around_toc_setup
)
8982 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
8983 gen_rtx_REG (Pmode
, 11));
8986 if (DEFAULT_ABI
== ABI_DARWIN
8987 && flag_pic
&& current_function_uses_pic_offset_table
)
8989 rtx dest
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
8991 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest
)));
8994 emit_move_insn (gen_rtx_REG (Pmode
, PIC_OFFSET_TABLE_REGNUM
),
8995 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)));
8999 /* Write function prologue. */
9002 rs6000_output_function_prologue (file
, size
)
9004 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
9006 rs6000_stack_t
*info
= rs6000_stack_info ();
9008 if (TARGET_DEBUG_STACK
)
9009 debug_stack_info (info
);
9011 /* Write .extern for any function we will call to save and restore
9013 if (info
->first_fp_reg_save
< 64
9014 && !FP_SAVE_INLINE (info
->first_fp_reg_save
))
9015 fprintf (file
, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
9016 SAVE_FP_PREFIX
, info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
,
9017 RESTORE_FP_PREFIX
, info
->first_fp_reg_save
- 32,
9020 /* Write .extern for AIX common mode routines, if needed. */
9021 if (! TARGET_POWER
&& ! TARGET_POWERPC
&& ! common_mode_defined
)
9023 fputs ("\t.extern __mulh\n", file
);
9024 fputs ("\t.extern __mull\n", file
);
9025 fputs ("\t.extern __divss\n", file
);
9026 fputs ("\t.extern __divus\n", file
);
9027 fputs ("\t.extern __quoss\n", file
);
9028 fputs ("\t.extern __quous\n", file
);
9029 common_mode_defined
= 1;
9032 if (! HAVE_prologue
)
9036 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
9037 the "toplevel" insn chain. */
9038 emit_note (0, NOTE_INSN_DELETED
);
9039 rs6000_emit_prologue ();
9040 emit_note (0, NOTE_INSN_DELETED
);
9042 if (TARGET_DEBUG_STACK
)
9043 debug_rtx_list (get_insns (), 100);
9044 final (get_insns (), file
, FALSE
, FALSE
);
9048 rs6000_pic_labelno
++;
9051 /* Emit function epilogue as insns.
9053 At present, dwarf2out_frame_debug_expr doesn't understand
9054 register restores, so we don't bother setting RTX_FRAME_RELATED_P
9055 anywhere in the epilogue. Most of the insns below would in any case
9056 need special notes to explain where r11 is in relation to the stack. */
9059 rs6000_emit_epilogue (sibcall
)
9062 rs6000_stack_t
*info
;
9063 int restoring_FPRs_inline
;
9064 int using_load_multiple
;
9065 int using_mfcr_multiple
;
9066 int use_backchain_to_restore_sp
;
9068 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, 1);
9069 rtx frame_reg_rtx
= sp_reg_rtx
;
9070 enum machine_mode reg_mode
= TARGET_POWERPC64
? DImode
: SImode
;
9071 int reg_size
= TARGET_POWERPC64
? 8 : 4;
9074 info
= rs6000_stack_info ();
9075 using_load_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
9076 && info
->first_gp_reg_save
< 31);
9077 restoring_FPRs_inline
= (sibcall
9078 || current_function_calls_eh_return
9079 || info
->first_fp_reg_save
== 64
9080 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
9081 use_backchain_to_restore_sp
= (frame_pointer_needed
9082 || current_function_calls_alloca
9083 || info
->total_size
> 32767);
9084 using_mfcr_multiple
= (rs6000_cpu
== PROCESSOR_PPC601
9085 || rs6000_cpu
== PROCESSOR_PPC603
9086 || rs6000_cpu
== PROCESSOR_PPC750
9089 /* If we have a frame pointer, a call to alloca, or a large stack
9090 frame, restore the old stack pointer using the backchain. Otherwise,
9091 we know what size to update it with. */
9092 if (use_backchain_to_restore_sp
)
9094 /* Under V.4, don't reset the stack pointer until after we're done
9095 loading the saved registers. */
9096 if (DEFAULT_ABI
== ABI_V4
)
9097 frame_reg_rtx
= gen_rtx_REG (Pmode
, 11);
9099 emit_move_insn (frame_reg_rtx
,
9100 gen_rtx_MEM (Pmode
, sp_reg_rtx
));
9103 else if (info
->push_p
)
9105 if (DEFAULT_ABI
== ABI_V4
)
9106 sp_offset
= info
->total_size
;
9109 emit_insn (TARGET_32BIT
9110 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
9111 GEN_INT (info
->total_size
))
9112 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
9113 GEN_INT (info
->total_size
)));
9117 /* Restore AltiVec registers if needed. */
9118 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
9122 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
9123 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
9125 rtx addr
, areg
, mem
;
9127 areg
= gen_rtx_REG (Pmode
, 0);
9129 (areg
, GEN_INT (info
->altivec_save_offset
9131 + 16 * (i
- info
->first_altivec_reg_save
)));
9133 /* AltiVec addressing mode is [reg+reg]. */
9134 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
);
9135 mem
= gen_rtx_MEM (V4SImode
, addr
);
9136 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9138 emit_move_insn (gen_rtx_REG (V4SImode
, i
), mem
);
9142 /* Restore VRSAVE if needed. */
9143 if (TARGET_ALTIVEC_ABI
&& info
->vrsave_mask
!= 0)
9147 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
9148 GEN_INT (info
->vrsave_save_offset
+ sp_offset
));
9149 mem
= gen_rtx_MEM (SImode
, addr
);
9150 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9151 reg
= gen_rtx_REG (SImode
, 12);
9152 emit_move_insn (reg
, mem
);
9154 emit_insn (generate_set_vrsave (reg
, info
, 1));
9157 /* Get the old lr if we saved it. */
9158 if (info
->lr_save_p
)
9160 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
9161 GEN_INT (info
->lr_save_offset
+ sp_offset
));
9162 rtx mem
= gen_rtx_MEM (Pmode
, addr
);
9164 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9166 emit_move_insn (gen_rtx_REG (Pmode
, 0), mem
);
9169 /* Get the old cr if we saved it. */
9170 if (info
->cr_save_p
)
9172 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
9173 GEN_INT (info
->cr_save_offset
+ sp_offset
));
9174 rtx mem
= gen_rtx_MEM (SImode
, addr
);
9176 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9178 emit_move_insn (gen_rtx_REG (SImode
, 12), mem
);
9181 /* Set LR here to try to overlap restores below. */
9182 if (info
->lr_save_p
)
9183 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
9184 gen_rtx_REG (Pmode
, 0));
9186 /* Load exception handler data registers, if needed. */
9187 if (current_function_calls_eh_return
)
9189 unsigned int i
, regno
;
9195 regno
= EH_RETURN_DATA_REGNO (i
);
9196 if (regno
== INVALID_REGNUM
)
9199 addr
= plus_constant (frame_reg_rtx
,
9200 info
->ehrd_offset
+ sp_offset
9201 + reg_size
* (int) i
);
9202 mem
= gen_rtx_MEM (reg_mode
, addr
);
9203 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9205 emit_move_insn (gen_rtx_REG (reg_mode
, regno
), mem
);
9209 /* Restore GPRs. This is done as a PARALLEL if we are using
9210 the load-multiple instructions. */
9211 if (using_load_multiple
)
9214 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
9215 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
9217 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
9218 GEN_INT (info
->gp_save_offset
9221 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
9223 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9226 gen_rtx_SET (VOIDmode
,
9227 gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
),
9230 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
9233 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
9234 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
9235 && ! call_used_regs
[info
->first_gp_reg_save
+i
])
9236 || (i
+info
->first_gp_reg_save
== PIC_OFFSET_TABLE_REGNUM
9237 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
9238 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
9240 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
9241 GEN_INT (info
->gp_save_offset
9244 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
9246 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9248 emit_move_insn (gen_rtx_REG (reg_mode
,
9249 info
->first_gp_reg_save
+ i
),
9253 /* Restore fpr's if we need to do it without calling a function. */
9254 if (restoring_FPRs_inline
)
9255 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
9256 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
9257 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
9260 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
9261 GEN_INT (info
->fp_save_offset
9264 mem
= gen_rtx_MEM (DFmode
, addr
);
9265 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9267 emit_move_insn (gen_rtx_REG (DFmode
,
9268 info
->first_fp_reg_save
+ i
),
9272 /* If we saved cr, restore it here. Just those that were used. */
9273 if (info
->cr_save_p
)
9275 rtx r12_rtx
= gen_rtx_REG (SImode
, 12);
9278 if (using_mfcr_multiple
)
9280 for (i
= 0; i
< 8; i
++)
9281 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
9287 if (using_mfcr_multiple
&& count
> 1)
9292 p
= rtvec_alloc (count
);
9295 for (i
= 0; i
< 8; i
++)
9296 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
9298 rtvec r
= rtvec_alloc (2);
9299 RTVEC_ELT (r
, 0) = r12_rtx
;
9300 RTVEC_ELT (r
, 1) = GEN_INT (1 << (7-i
));
9301 RTVEC_ELT (p
, ndx
) =
9302 gen_rtx_SET (VOIDmode
, gen_rtx_REG (CCmode
, CR0_REGNO
+i
),
9303 gen_rtx_UNSPEC (CCmode
, r
, 20));
9306 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
9311 for (i
= 0; i
< 8; i
++)
9312 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
9314 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode
,
9320 /* If this is V.4, unwind the stack pointer after all of the loads
9321 have been done. We need to emit a block here so that sched
9322 doesn't decide to move the sp change before the register restores
9323 (which may not have any obvious dependency on the stack). This
9324 doesn't hurt performance, because there is no scheduling that can
9325 be done after this point. */
9326 if (DEFAULT_ABI
== ABI_V4
)
9328 if (frame_reg_rtx
!= sp_reg_rtx
)
9329 rs6000_emit_stack_tie ();
9331 if (use_backchain_to_restore_sp
)
9333 emit_move_insn (sp_reg_rtx
, frame_reg_rtx
);
9335 else if (sp_offset
!= 0)
9337 emit_insn (Pmode
== SImode
9338 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
9339 GEN_INT (sp_offset
))
9340 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
9341 GEN_INT (sp_offset
)));
9345 if (current_function_calls_eh_return
)
9347 rtx sa
= EH_RETURN_STACKADJ_RTX
;
9348 emit_insn (Pmode
== SImode
9349 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
, sa
)
9350 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
, sa
));
9356 if (! restoring_FPRs_inline
)
9357 p
= rtvec_alloc (3 + 64 - info
->first_fp_reg_save
);
9359 p
= rtvec_alloc (2);
9361 RTVEC_ELT (p
, 0) = gen_rtx_RETURN (VOIDmode
);
9362 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
9364 LINK_REGISTER_REGNUM
));
9366 /* If we have to restore more than two FP registers, branch to the
9367 restore function. It will return to our caller. */
9368 if (! restoring_FPRs_inline
)
9372 const char *alloc_rname
;
9374 sprintf (rname
, "%s%d%s", RESTORE_FP_PREFIX
,
9375 info
->first_fp_reg_save
- 32, RESTORE_FP_SUFFIX
);
9376 alloc_rname
= ggc_strdup (rname
);
9377 RTVEC_ELT (p
, 2) = gen_rtx_USE (VOIDmode
,
9378 gen_rtx_SYMBOL_REF (Pmode
,
9381 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
9384 addr
= gen_rtx_PLUS (Pmode
, sp_reg_rtx
,
9385 GEN_INT (info
->fp_save_offset
+ 8*i
));
9386 mem
= gen_rtx_MEM (DFmode
, addr
);
9387 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
9389 RTVEC_ELT (p
, i
+3) =
9390 gen_rtx_SET (VOIDmode
,
9391 gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
),
9396 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
9400 /* Write function epilogue. */
/* Emit the textual epilogue for the current function to FILE: if there is
   no `epilogue' RTL pattern, generate the epilogue insns here and run
   final () over them; then, under the AIX ABI, emit the traceback table
   that follows the function body.
   NOTE(review): this region was damaged in extraction -- original lines
   (braces, some declarations and else-arms) are missing; the comments
   added below describe only what is visible.  */
9403 rs6000_output_function_epilogue (file
, size
)
9405 HOST_WIDE_INT size ATTRIBUTE_UNUSED
;
9407 rs6000_stack_t
*info
= rs6000_stack_info ();
/* Optional traceback fields are suppressed when optimizing for size or
   targeting ELF (where the AIX traceback table is not used in full).  */
9408 int optional_tbtab
= (optimize_size
|| TARGET_ELF
) ? 0 : 1;
/* No epilogue insn pattern: build the epilogue RTL now and feed it
   through final () so it lands in the assembly output.  */
9410 if (! HAVE_epilogue
)
9412 rtx insn
= get_last_insn ();
9413 /* If the last insn was a BARRIER, we don't have to write anything except
9415 if (GET_CODE (insn
) == NOTE
)
9416 insn
= prev_nonnote_insn (insn
);
9417 if (insn
== 0 || GET_CODE (insn
) != BARRIER
)
9419 /* This is slightly ugly, but at least we don't have two
9420 copies of the epilogue-emitting code. */
9423 /* A NOTE_INSN_DELETED is supposed to be at the start
9424 and end of the "toplevel" insn chain. */
9425 emit_note (0, NOTE_INSN_DELETED
);
9426 rs6000_emit_epilogue (FALSE
);
9427 emit_note (0, NOTE_INSN_DELETED
);
9429 if (TARGET_DEBUG_STACK
)
9430 debug_rtx_list (get_insns (), 100);
9431 final (get_insns (), file
, FALSE
, FALSE
);
9436 /* Output a traceback table here. See /usr/include/sys/debug.h for info
9439 We don't output a traceback table if -finhibit-size-directive was
9440 used. The documentation for -finhibit-size-directive reads
9441 ``don't output a @code{.size} assembler directive, or anything
9442 else that would cause trouble if the function is split in the
9443 middle, and the two halves are placed at locations far apart in
9444 memory.'' The traceback table has this property, since it
9445 includes the offset from the start of the function to the
9446 traceback table itself.
9448 System V.4 Powerpc's (and the embedded ABI derived from it) use a
9449 different traceback table. */
9450 if (DEFAULT_ABI
== ABI_AIX
&& ! flag_inhibit_size_directive
)
9452 const char *fname
= XSTR (XEXP (DECL_RTL (current_function_decl
), 0), 0);
9453 const char *language_string
= lang_hooks
.name
;
9454 int fixed_parms
= 0, float_parms
= 0, parm_info
= 0;
/* Skip leading periods that the V.4 ABI encodes into symbol names.  */
9457 while (*fname
== '.') /* V.4 encodes . in the name */
9460 /* Need label immediately before tbtab, so we can compute its offset
9461 from the function start. */
9464 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
9465 ASM_OUTPUT_LABEL (file
, fname
);
9467 /* The .tbtab pseudo-op can only be used for the first eight
9468 expressions, since it can't handle the possibly variable
9469 length fields that follow. However, if you omit the optional
9470 fields, the assembler outputs zeros for all optional fields
9471 anyways, giving each variable length field is minimum length
9472 (as defined in sys/debug.h). Thus we can not use the .tbtab
9473 pseudo-op at all. */
9475 /* An all-zero word flags the start of the tbtab, for debuggers
9476 that have to find it by searching forward from the entry
9477 point or from the current pc. */
9478 fputs ("\t.long 0\n", file
);
9480 /* Tbtab format type. Use format type 0. */
9481 fputs ("\t.byte 0,", file
);
9483 /* Language type. Unfortunately, there doesn't seem to be any
9484 official way to get this info, so we use language_string. C
9485 is 0. C++ is 9. No number defined for Obj-C, so use the
9486 value for C for now. There is no official value for Java,
9487 although IBM appears to be using 13. There is no official value
9488 for Chill, so we've chosen 44 pseudo-randomly. */
9489 if (! strcmp (language_string
, "GNU C")
9490 || ! strcmp (language_string
, "GNU Objective-C"))
9492 else if (! strcmp (language_string
, "GNU F77"))
9494 else if (! strcmp (language_string
, "GNU Ada"))
9496 else if (! strcmp (language_string
, "GNU Pascal"))
9498 else if (! strcmp (language_string
, "GNU C++"))
9500 else if (! strcmp (language_string
, "GNU Java"))
9502 else if (! strcmp (language_string
, "GNU CHILL"))
/* NOTE(review): the per-language assignments to `i' were lost in
   extraction; only the selector chain above survives here.  */
9506 fprintf (file
, "%d,", i
);
9508 /* 8 single bit fields: global linkage (not set for C extern linkage,
9509 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
9510 from start of procedure stored in tbtab, internal function, function
9511 has controlled storage, function has no toc, function uses fp,
9512 function logs/aborts fp operations. */
9513 /* Assume that fp operations are used if any fp reg must be saved. */
9514 fprintf (file
, "%d,",
9515 (optional_tbtab
<< 5) | ((info
->first_fp_reg_save
!= 64) << 1));
9517 /* 6 bitfields: function is interrupt handler, name present in
9518 proc table, function calls alloca, on condition directives
9519 (controls stack walks, 3 bits), saves condition reg, saves
9521 /* The `function calls alloca' bit seems to be set whenever reg 31 is
9522 set up as a frame pointer, even when there is no alloca call. */
9523 fprintf (file
, "%d,",
9524 ((optional_tbtab
<< 6)
9525 | ((optional_tbtab
& frame_pointer_needed
) << 5)
9526 | (info
->cr_save_p
<< 1)
9527 | (info
->lr_save_p
)));
9529 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
9531 fprintf (file
, "%d,",
9532 (info
->push_p
<< 7) | (64 - info
->first_fp_reg_save
));
9534 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
9535 fprintf (file
, "%d,", (32 - first_reg_to_save ()));
9539 /* Compute the parameter info from the function decl argument
/* Walk DECL_ARGUMENTS, packing two bits per FP parameter and one bit
   per fixed-point parameter word into parm_info (left-adjusted).  */
9542 int next_parm_info_bit
= 31;
9544 for (decl
= DECL_ARGUMENTS (current_function_decl
);
9545 decl
; decl
= TREE_CHAIN (decl
))
9547 rtx parameter
= DECL_INCOMING_RTL (decl
);
9548 enum machine_mode mode
= GET_MODE (parameter
);
9550 if (GET_CODE (parameter
) == REG
)
9552 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
9560 else if (mode
== DFmode
)
9565 /* If only one bit will fit, don't or in this entry. */
9566 if (next_parm_info_bit
> 0)
9567 parm_info
|= (bits
<< (next_parm_info_bit
- 1));
9568 next_parm_info_bit
-= 2;
9572 fixed_parms
+= ((GET_MODE_SIZE (mode
)
9573 + (UNITS_PER_WORD
- 1))
9575 next_parm_info_bit
-= 1;
9581 /* Number of fixed point parameters. */
9582 /* This is actually the number of words of fixed point parameters; thus
9583 an 8 byte struct counts as 2; and thus the maximum value is 8. */
9584 fprintf (file
, "%d,", fixed_parms
);
9586 /* 2 bitfields: number of floating point parameters (7 bits), parameters
9588 /* This is actually the number of fp registers that hold parameters;
9589 and thus the maximum value is 13. */
9590 /* Set parameters on stack bit if parameters are not in their original
9591 registers, regardless of whether they are on the stack? Xlc
9592 seems to set the bit when not optimizing. */
9593 fprintf (file
, "%d\n", ((float_parms
<< 1) | (! optimize
)));
9595 if (! optional_tbtab
)
9598 /* Optional fields follow. Some are variable length. */
9600 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
9602 /* There is an entry for each parameter in a register, in the order that
9603 they occur in the parameter list. Any intervening arguments on the
9604 stack are ignored. If the list overflows a long (max possible length
9605 34 bits) then completely leave off all elements that don't fit. */
9606 /* Only emit this long if there was at least one parameter. */
9607 if (fixed_parms
|| float_parms
)
9608 fprintf (file
, "\t.long %d\n", parm_info
);
9610 /* Offset from start of code to tb table. */
9611 fputs ("\t.long ", file
);
9612 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
9614 RS6000_OUTPUT_BASENAME (file
, fname
);
9616 assemble_name (file
, fname
);
9620 RS6000_OUTPUT_BASENAME (file
, fname
);
9622 assemble_name (file
, fname
);
9626 /* Interrupt handler mask. */
9627 /* Omit this long, since we never set the interrupt handler bit
9630 /* Number of CTL (controlled storage) anchors. */
9631 /* Omit this long, since the has_ctl bit is never set above. */
9633 /* Displacement into stack of each CTL anchor. */
9634 /* Omit this list of longs, because there are no CTL anchors. */
9636 /* Length of function name. */
9637 fprintf (file
, "\t.short %d\n", (int) strlen (fname
));
9639 /* Function name. */
9640 assemble_string (fname
, strlen (fname
));
9642 /* Register for alloca automatic storage; this is always reg 31.
9643 Only emit this if the alloca bit was set above. */
9644 if (frame_pointer_needed
)
9645 fputs ("\t.byte 31\n", file
);
9647 fputs ("\t.align 2\n", file
);
9652 /* A C compound statement that outputs the assembler code for a thunk
9653 function, used to implement C++ virtual function calls with
9654 multiple inheritance. The thunk acts as a wrapper around a virtual
9655 function, adjusting the implicit object parameter before handing
9656 control off to the real function.
9658 First, emit code to add the integer DELTA to the location that
9659 contains the incoming first argument. Assume that this argument
9660 contains a pointer, and is the one used to pass the `this' pointer
9661 in C++. This is the incoming argument *before* the function
9662 prologue, e.g. `%o0' on a sparc. The addition must preserve the
9663 values of all other incoming arguments.
9665 After the addition, emit code to jump to FUNCTION, which is a
9666 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
9667 not touch the return address. Hence returning from FUNCTION will
9668 return to whoever called the current `thunk'.
9670 The effect must be as if FUNCTION had been called directly with the
9671 adjusted first argument. This macro is responsible for emitting
9672 all of the code for a thunk function; output_function_prologue()
9673 and output_function_epilogue() are not invoked.
9675 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
9676 been extracted from it.) It might possibly be useful on some
9677 targets, but probably not.
9679 If you do not define this macro, the target-independent code in the
9680 C++ frontend will generate a less efficient heavyweight thunk that
9681 calls FUNCTION instead of jumping to it. The generic approach does
9682 not support varargs. */
/* Emit assembly for a C++ multiple-inheritance thunk: add DELTA to the
   incoming `this' pointer (r3, or r4 when the function returns an
   aggregate in memory) and tail-jump to FUNCTION without touching the
   link register.  THUNK_FNDECL is unused.
   NOTE(review): extraction dropped several original lines here (braces,
   some else-arms and switch cases); comments describe only what is
   visible.  */
9685 output_mi_thunk (file
, thunk_fndecl
, delta
, function
)
9687 tree thunk_fndecl ATTRIBUTE_UNUSED
;
/* `this' arrives in r4 rather than r3 when the callee returns an
   aggregate via an invisible first argument.  */
9691 const char *this_reg
=
9692 reg_names
[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function
))) ? 4 : 3 ];
9695 const char *r0
= reg_names
[0];
9696 const char *toc
= reg_names
[2];
9697 const char *schain
= reg_names
[11];
9698 const char *r12
= reg_names
[12];
9700 static int labelno
= 0;
9702 /* Small constants that can be done by one add instruction. */
9703 if (delta
>= -32768 && delta
<= 32767)
9705 if (! TARGET_NEW_MNEMONICS
)
9706 fprintf (file
, "\tcal %s,%d(%s)\n", this_reg
, delta
, this_reg
);
9708 fprintf (file
, "\taddi %s,%s,%d\n", this_reg
, this_reg
, delta
);
9711 /* Large constants that can be done by one addis instruction. */
9712 else if ((delta
& 0xffff) == 0 && num_insns_constant_wide (delta
) == 1)
9713 asm_fprintf (file
, "\t{cau|addis} %s,%s,%d\n", this_reg
, this_reg
,
9716 /* 32-bit constants that can be done by an add and addis instruction. */
9717 else if (TARGET_32BIT
|| num_insns_constant_wide (delta
) == 1)
9719 /* Break into two pieces, propagating the sign bit from the low
9720 word to the upper word. */
9721 int delta_high
= delta
>> 16;
9722 int delta_low
= delta
& 0xffff;
9723 if ((delta_low
& 0x8000) != 0)
9726 delta_low
= (delta_low
^ 0x8000) - 0x8000; /* sign extend */
9729 asm_fprintf (file
, "\t{cau|addis} %s,%s,%d\n", this_reg
, this_reg
,
9732 if (! TARGET_NEW_MNEMONICS
)
9733 fprintf (file
, "\tcal %s,%d(%s)\n", this_reg
, delta_low
, this_reg
);
9735 fprintf (file
, "\taddi %s,%s,%d\n", this_reg
, this_reg
, delta_low
);
9738 /* 64-bit constants, fixme */
9742 /* Get the prefix in front of the names. */
9743 switch (DEFAULT_ABI
)
9753 case ABI_AIX_NODESC
:
9758 /* If the function is compiled in this module, jump to it directly.
9759 Otherwise, load up its address and jump to it. */
9761 fname
= XSTR (XEXP (DECL_RTL (function
), 0), 0);
9763 if (current_file_function_operand (XEXP (DECL_RTL (function
), 0), VOIDmode
)
9764 && ! lookup_attribute ("longcall",
9765 TYPE_ATTRIBUTES (TREE_TYPE (function
))))
9767 fprintf (file
, "\tb %s", prefix
);
9768 assemble_name (file
, fname
);
9769 if (DEFAULT_ABI
== ABI_V4
&& flag_pic
) fputs ("@local", file
);
/* Out-of-module target: per-ABI sequences that load the function's
   address (AIX: via a TOC entry and the count register).  */
9775 switch (DEFAULT_ABI
)
9781 /* Set up a TOC entry for the function. */
9782 ASM_GENERATE_INTERNAL_LABEL (buf
, "Lthunk", labelno
);
9784 ASM_OUTPUT_INTERNAL_LABEL (file
, "Lthunk", labelno
);
9787 if (TARGET_MINIMAL_TOC
)
9788 fputs (TARGET_32BIT
? "\t.long " : DOUBLE_INT_ASM_OP
, file
);
9791 fputs ("\t.tc ", file
);
9792 assemble_name (file
, fname
);
9793 fputs ("[TC],", file
);
9795 assemble_name (file
, fname
);
9798 if (TARGET_MINIMAL_TOC
)
9799 asm_fprintf (file
, (TARGET_32BIT
)
9800 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12
,
9801 TARGET_ELF
? ".LCTOC0@toc" : ".LCTOC..1", toc
);
9802 asm_fprintf (file
, (TARGET_32BIT
) ? "\t{l|lwz} %s," : "\tld %s,", r12
);
9803 assemble_name (file
, buf
);
9804 if (TARGET_ELF
&& TARGET_MINIMAL_TOC
)
9805 fputs ("-(.LCTOC1)", file
);
9806 asm_fprintf (file
, "(%s)\n", TARGET_MINIMAL_TOC
? r12
: toc
);
/* Load the descriptor: entry address into r0, new TOC pointer, and
   static chain; then branch through the count register.  */
9808 (TARGET_32BIT
) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
9812 (TARGET_32BIT
) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
9815 asm_fprintf (file
, "\tmtctr %s\n", r0
);
9817 (TARGET_32BIT
) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
9820 asm_fprintf (file
, "\tbctr\n");
9823 case ABI_AIX_NODESC
:
9825 fprintf (file
, "\tb %s", prefix
);
9826 assemble_name (file
, fname
);
9827 if (flag_pic
) fputs ("@plt", file
);
9833 fprintf (file
, "\tb %s", prefix
);
9834 if (flag_pic
&& !machopic_name_defined_p (fname
))
9835 assemble_name (file
, machopic_stub_name (fname
));
9837 assemble_name (file
, fname
);
9846 /* A quick summary of the various types of 'constant-pool tables'
9849 Target Flags Name One table per
9850 AIX (none) AIX TOC object file
9851 AIX -mfull-toc AIX TOC object file
9852 AIX -mminimal-toc AIX minimal TOC translation unit
9853 SVR4/EABI (none) SVR4 SDATA object file
9854 SVR4/EABI -fpic SVR4 pic object file
9855 SVR4/EABI -fPIC SVR4 PIC translation unit
9856 SVR4/EABI -mrelocatable EABI TOC function
9857 SVR4/EABI -maix AIX TOC object file
9858 SVR4/EABI -maix -mminimal-toc
9859 AIX minimal TOC translation unit
9861 Name Reg. Set by entries contains:
9862 made by addrs? fp? sum?
9864 AIX TOC 2 crt0 as Y option option
9865 AIX minimal TOC 30 prolog gcc Y Y option
9866 SVR4 SDATA 13 crt0 gcc N Y N
9867 SVR4 pic 30 prolog ld Y not yet N
9868 SVR4 PIC 30 prolog gcc Y option option
9869 EABI TOC 30 prolog gcc Y option option
9873 /* Hash table stuff for keeping track of TOC entries. */
/* One entry per distinct TOC constant: the constant rtx, its machine
   mode, and the assembler label number assigned to it.
   NOTE(review): the rtx `key' and int `labelno' field declarations were
   lost in extraction; only key_mode is visible here.  */
9875 struct toc_hash_struct
9877 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
9878 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
9880 enum machine_mode key_mode
;
/* The single hash table used to deduplicate TOC entries.  */
9884 static htab_t toc_hash_table
;
9886 /* Hash functions for the hash table. */
/* Compute a hash value for constant rtx K by mixing its code, mode and
   operands (recursing into sub-rtxes).  Multipliers 613 and 1231 are
   arbitrary odd mixing constants.  */
9889 rs6000_hash_constant (k
)
9892 unsigned result
= (GET_CODE (k
) << 3) ^ GET_MODE (k
);
9893 const char *format
= GET_RTX_FORMAT (GET_CODE (k
));
9894 int flen
= strlen (format
);
/* LABEL_REFs hash on the contained label's number only.  */
9897 if (GET_CODE (k
) == LABEL_REF
)
9898 return result
* 1231 + X0INT (XEXP (k
, 0), 3);
9900 if (GET_CODE (k
) == CONST_DOUBLE
)
9902 else if (GET_CODE (k
) == CODE_LABEL
)
/* Fold each operand into the hash according to its format letter.  */
9907 for (; fidx
< flen
; fidx
++)
9908 switch (format
[fidx
])
9913 const char *str
= XSTR (k
, fidx
);
9915 result
= result
* 613 + len
;
9916 for (i
= 0; i
< len
; i
++)
9917 result
= result
* 613 + (unsigned) str
[i
];
9922 result
= result
* 1231 + rs6000_hash_constant (XEXP (k
, fidx
));
9926 result
= result
* 613 + (unsigned) XINT (k
, fidx
);
/* Wide ints are folded whole when `unsigned' is wide enough,
   otherwise one `unsigned'-sized slice at a time.  */
9929 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT
))
9930 result
= result
* 613 + (unsigned) XWINT (k
, fidx
);
9934 for (i
= 0; i
< sizeof(HOST_WIDE_INT
)/sizeof(unsigned); i
++)
9935 result
= result
* 613 + (unsigned) (XWINT (k
, fidx
)
/* htab hash callback: hash a toc_hash_struct by its constant rtx,
   xor'd with the machine mode so equal constants of different modes
   land in different buckets.  */
9946 toc_hash_function (hash_entry
)
9947 const void * hash_entry
;
9949 const struct toc_hash_struct
*thc
=
9950 (const struct toc_hash_struct
*) hash_entry
;
9951 return rs6000_hash_constant (thc
->key
) ^ thc
->key_mode
;
9954 /* Compare H1 and H2 for equivalence. */
/* htab equality callback for toc_hash_struct entries: equal when modes
   match and the constant rtxes are structurally equal.  CONST_DOUBLE
   and LABEL_REF need field-by-field comparison because rtx_equal_p
   would treat pool-chained copies as distinct.  */
9957 toc_hash_eq (h1
, h2
)
9961 rtx r1
= ((const struct toc_hash_struct
*) h1
)->key
;
9962 rtx r2
= ((const struct toc_hash_struct
*) h2
)->key
;
9964 if (((const struct toc_hash_struct
*) h1
)->key_mode
9965 != ((const struct toc_hash_struct
*) h2
)->key_mode
)
9968 /* Gotcha: One of these const_doubles will be in memory.
9969 The other may be on the constant-pool chain.
9970 So rtx_equal_p will think they are different... */
9973 if (GET_CODE (r1
) != GET_CODE (r2
)
9974 || GET_MODE (r1
) != GET_MODE (r2
))
/* Compare CONST_DOUBLE payload words directly (operand 0 is the
   chain pointer, so start at 1).  */
9976 if (GET_CODE (r1
) == CONST_DOUBLE
)
9978 int format_len
= strlen (GET_RTX_FORMAT (CONST_DOUBLE
));
9980 for (i
= 1; i
< format_len
; i
++)
9981 if (XWINT (r1
, i
) != XWINT (r2
, i
))
9986 else if (GET_CODE (r1
) == LABEL_REF
)
9987 return (CODE_LABEL_NUMBER (XEXP (r1
, 0))
9988 == CODE_LABEL_NUMBER (XEXP (r2
, 0)));
9990 return rtx_equal_p (r1
, r2
);
9993 /* Mark the hash table-entry HASH_ENTRY. */
/* GC-marking callback for one TOC hash slot: mark the entry struct
   itself, then its key rtx -- but for LABEL_REFs mark only the label,
   not the insn chain it would drag in.  */
9996 toc_hash_mark_entry (hash_slot
, unused
)
9998 void * unused ATTRIBUTE_UNUSED
;
10000 const struct toc_hash_struct
* hash_entry
=
10001 *(const struct toc_hash_struct
**) hash_slot
;
10002 rtx r
= hash_entry
->key
;
10003 ggc_set_mark (hash_entry
);
10004 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
10005 if (GET_CODE (r
) == LABEL_REF
)
10008 ggc_set_mark (XEXP (r
, 0));
10015 /* Mark all the elements of the TOC hash-table *HT. */
/* GC root walker: traverse every slot of the TOC hash table, marking
   each entry via toc_hash_mark_entry.  VHT is an htab_t* passed as
   void*.  */
10018 toc_hash_mark_table (vht
)
10023 htab_traverse (*ht
, toc_hash_mark_entry
, (void *)0);
10026 /* These are the names given by the C++ front-end to vtables, and
10027 vtable-like objects. Ideally, this logic should not be here;
10028 instead, there should be some programmatic way of inquiring as
10029 to whether or not an object is a vtable. */
/* Return nonzero if NAME looks like the mangled name of a vtable or
   vtable-like object: the old g++ "_vt." prefix, or the V3 ABI
   prefixes _ZTV (vtable), _ZTT (VTT) and _ZTC (construction vtable).
   Fix: the original expansion referred to a variable `name' instead of
   the macro parameter NAME, silently capturing whatever `name'
   happened to be in scope at each call site; every argument use is now
   (NAME), parenthesized, so the macro is hygienic.  Both existing
   callers pass a variable called `name', so behavior is unchanged.  */
#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
/* Output SYMBOL_REF X to FILE.  Vtable symbols are emitted by basename
   (not via assemble_name) so the reference names the symbol rather
   than a possibly-unknown section; everything else goes through
   assemble_name as usual.  */
10038 rs6000_output_symbol_ref (file
, x
)
10042 /* Currently C++ toc references to vtables can be emitted before it
10043 is decided whether the vtable is public or private. If this is
10044 the case, then the linker will eventually complain that there is
10045 a reference to an unknown section. Thus, for vtables only,
10046 we emit the TOC reference to reference the symbol and not the
10048 const char *name
= XSTR (x
, 0);
10050 if (VTABLE_NAME_P (name
))
10052 RS6000_OUTPUT_BASENAME (file
, name
);
10055 assemble_name (file
, name
);
10058 /* Output a TOC entry. We derive the entry name from what is being
/* Emit one TOC entry for constant X with label number LABELNO in mode
   MODE: deduplicate via toc_hash_table, align doubles when required,
   then write a mode-appropriate .tc/.long/.llong line (FP constants,
   VOIDmode integers, and symbolic addresses each get their own form).
   NOTE(review): extraction dropped many original lines here (braces,
   some declarations, else-arms); comments annotate only visible code.  */
10062 output_toc (file
, x
, labelno
, mode
)
10066 enum machine_mode mode
;
10069 const char *name
= buf
;
10070 const char *real_name
;
10077 /* When the linker won't eliminate them, don't output duplicate
10078 TOC entries (this happens on AIX if there is any kind of TOC,
10079 and on SVR4 under -fPIC or -mrelocatable). */
10082 struct toc_hash_struct
*h
;
10085 h
= ggc_alloc (sizeof (*h
));
10087 h
->key_mode
= mode
;
10088 h
->labelno
= labelno
;
10090 found
= htab_find_slot (toc_hash_table
, h
, 1);
10091 if (*found
== NULL
)
10093 else /* This is indeed a duplicate.
10094 Set this label equal to that label. */
10096 fputs ("\t.set ", file
);
10097 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
10098 fprintf (file
, "%d,", labelno
);
10099 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
10100 fprintf (file
, "%d\n", ((*(const struct toc_hash_struct
**)
10106 /* If we're going to put a double constant in the TOC, make sure it's
10107 aligned properly when strict alignment is on. */
10108 if (GET_CODE (x
) == CONST_DOUBLE
10109 && STRICT_ALIGNMENT
10110 && GET_MODE_BITSIZE (mode
) >= 64
10111 && ! (TARGET_NO_FP_IN_TOC
&& ! TARGET_MINIMAL_TOC
)) {
10112 ASM_OUTPUT_ALIGN (file
, 3);
10115 ASM_OUTPUT_INTERNAL_LABEL (file
, "LC", labelno
);
10117 /* Handle FP constants specially. Note that if we have a minimal
10118 TOC, things we put here aren't actually in the TOC, so we can allow
/* DFmode double: emit both 32-bit halves of the target image.  */
10120 if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
10122 REAL_VALUE_TYPE rv
;
10125 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
10126 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
10130 if (TARGET_MINIMAL_TOC
)
10131 fputs (DOUBLE_INT_ASM_OP
, file
);
10133 fprintf (file
, "\t.tc FD_%lx_%lx[TC],", k
[0], k
[1]);
10134 fprintf (file
, "0x%lx%08lx\n", k
[0], k
[1]);
10139 if (TARGET_MINIMAL_TOC
)
10140 fputs ("\t.long ", file
);
10142 fprintf (file
, "\t.tc FD_%lx_%lx[TC],", k
[0], k
[1]);
10143 fprintf (file
, "0x%lx,0x%lx\n", k
[0], k
[1]);
/* SFmode single: one 32-bit word, zero-padded on 64-bit targets.  */
10147 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
10149 REAL_VALUE_TYPE rv
;
10152 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
10153 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
10157 if (TARGET_MINIMAL_TOC
)
10158 fputs (DOUBLE_INT_ASM_OP
, file
);
10160 fprintf (file
, "\t.tc FS_%lx[TC],", l
);
10161 fprintf (file
, "0x%lx00000000\n", l
);
10166 if (TARGET_MINIMAL_TOC
)
10167 fputs ("\t.long ", file
);
10169 fprintf (file
, "\t.tc FS_%lx[TC],", l
);
10170 fprintf (file
, "0x%lx\n", l
);
/* VOIDmode integers: recover the low/high halves from CONST_INT or
   CONST_DOUBLE, then pad to pointer size before emitting.  */
10174 else if (GET_MODE (x
) == VOIDmode
10175 && (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
))
10177 unsigned HOST_WIDE_INT low
;
10178 HOST_WIDE_INT high
;
10180 if (GET_CODE (x
) == CONST_DOUBLE
)
10182 low
= CONST_DOUBLE_LOW (x
);
10183 high
= CONST_DOUBLE_HIGH (x
);
10186 #if HOST_BITS_PER_WIDE_INT == 32
10189 high
= (low
& 0x80000000) ? ~0 : 0;
10193 low
= INTVAL (x
) & 0xffffffff;
10194 high
= (HOST_WIDE_INT
) INTVAL (x
) >> 32;
10198 /* TOC entries are always Pmode-sized, but since this
10199 is a bigendian machine then if we're putting smaller
10200 integer constants in the TOC we have to pad them.
10201 (This is still a win over putting the constants in
10202 a separate constant pool, because then we'd have
10203 to have both a TOC entry _and_ the actual constant.)
10205 For a 32-bit target, CONST_INT values are loaded and shifted
10206 entirely within `low' and can be stored in one TOC entry. */
10208 if (TARGET_64BIT
&& POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
10209 abort ();/* It would be easy to make this work, but it doesn't now. */
10211 if (POINTER_SIZE
> GET_MODE_BITSIZE (mode
))
10212 lshift_double (low
, high
, POINTER_SIZE
- GET_MODE_BITSIZE (mode
),
10213 POINTER_SIZE
, &low
, &high
, 0);
10217 if (TARGET_MINIMAL_TOC
)
10218 fputs (DOUBLE_INT_ASM_OP
, file
);
10220 fprintf (file
, "\t.tc ID_%lx_%lx[TC],", (long) high
, (long) low
);
10221 fprintf (file
, "0x%lx%08lx\n", (long) high
, (long) low
);
10226 if (POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
10228 if (TARGET_MINIMAL_TOC
)
10229 fputs ("\t.long ", file
);
10231 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
10232 (long) high
, (long) low
);
10233 fprintf (file
, "0x%lx,0x%lx\n", (long) high
, (long) low
);
10237 if (TARGET_MINIMAL_TOC
)
10238 fputs ("\t.long ", file
);
10240 fprintf (file
, "\t.tc IS_%lx[TC],", (long) low
);
10241 fprintf (file
, "0x%lx\n", (long) low
);
/* Symbolic address (possibly symbol/label plus constant offset).  */
10247 if (GET_CODE (x
) == CONST
)
10249 if (GET_CODE (XEXP (x
, 0)) != PLUS
)
10252 base
= XEXP (XEXP (x
, 0), 0);
10253 offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
10256 if (GET_CODE (base
) == SYMBOL_REF
)
10257 name
= XSTR (base
, 0);
10258 else if (GET_CODE (base
) == LABEL_REF
)
10259 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (XEXP (base
, 0)));
10260 else if (GET_CODE (base
) == CODE_LABEL
)
10261 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (base
));
10265 STRIP_NAME_ENCODING (real_name
, name
);
10266 if (TARGET_MINIMAL_TOC
)
10267 fputs (TARGET_32BIT
? "\t.long " : DOUBLE_INT_ASM_OP
, file
);
10270 fprintf (file
, "\t.tc %s", real_name
);
/* Encode a nonzero offset into the TOC entry name (.N for negative,
   .P for positive) so distinct offsets get distinct entries.  */
10273 fprintf (file
, ".N%d", - offset
);
10275 fprintf (file
, ".P%d", offset
);
10277 fputs ("[TC],", file
);
10280 /* Currently C++ toc references to vtables can be emitted before it
10281 is decided whether the vtable is public or private. If this is
10282 the case, then the linker will eventually complain that there is
10283 a TOC reference to an unknown section. Thus, for vtables only,
10284 we emit the TOC reference to reference the symbol and not the
10286 if (VTABLE_NAME_P (name
))
10288 RS6000_OUTPUT_BASENAME (file
, name
);
10290 fprintf (file
, "%d", offset
);
10291 else if (offset
> 0)
10292 fprintf (file
, "+%d", offset
);
10295 output_addr_const (file
, x
);
10299 /* Output an assembler pseudo-op to write an ASCII string of N characters
10300 starting at P to FILE.
10302 On the RS/6000, we have to do this using the .byte operation and
10303 write out special characters outside the quoted string.
10304 Also, the assembler is broken; very long strings are truncated,
10305 so we must artificially break them up early. */
/* for_string / for_decimal hold the separator to print before the next
   quoted run or decimal byte; to_close holds the text that terminates
   the currently-open quoted string (NULL when none is open).  */
10308 output_ascii (file
, p
, n
)
10314 int i
, count_string
;
10315 const char *for_string
= "\t.byte \"";
10316 const char *for_decimal
= "\t.byte ";
10317 const char *to_close
= NULL
;
10320 for (i
= 0; i
< n
; i
++)
/* Printable characters go inside a quoted string; everything else is
   emitted as a decimal .byte value.  */
10323 if (c
>= ' ' && c
< 0177)
10326 fputs (for_string
, file
);
10329 /* Write two quotes to get one. */
10337 for_decimal
= "\"\n\t.byte ";
/* Break long quoted runs so the (buggy) assembler does not truncate
   them.  */
10341 if (count_string
>= 512)
10343 fputs (to_close
, file
);
10345 for_string
= "\t.byte \"";
10346 for_decimal
= "\t.byte ";
10354 fputs (for_decimal
, file
);
10355 fprintf (file
, "%d", c
);
10357 for_string
= "\n\t.byte \"";
10358 for_decimal
= ", ";
10364 /* Now close the string if we have written one. Then end the line. */
10366 fputs (to_close
, file
);
10369 /* Generate a unique section name for FILENAME for a section type
10370 represented by SECTION_DESC. Output goes into BUF.
10372 SECTION_DESC can be any string, as long as it is different for each
10373 possible section type.
10375 We name the section in the same manner as xlc. The name begins with an
10376 underscore followed by the filename (after stripping any leading directory
10377 names) with the last period replaced by the string SECTION_DESC. If
10378 FILENAME does not contain a period, SECTION_DESC is appended to the end of
10382 rs6000_gen_section_name (buf
, filename
, section_desc
)
10384 const char *filename
;
10385 const char *section_desc
;
10387 const char *q
, *after_last_slash
, *last_period
= 0;
/* First pass: find the basename (text after the last '/') and remember
   the position of the last '.'.  */
10391 after_last_slash
= filename
;
10392 for (q
= filename
; *q
; q
++)
10395 after_last_slash
= q
+ 1;
10396 else if (*q
== '.')
/* Worst-case size: basename + section_desc + leading '_' + NUL.  */
10400 len
= strlen (after_last_slash
) + strlen (section_desc
) + 2;
10401 *buf
= (char *) permalloc (len
);
/* Second pass: copy the basename, substituting SECTION_DESC for the
   last period and dropping non-alphanumeric characters.  */
10406 for (q
= after_last_slash
; *q
; q
++)
10408 if (q
== last_period
)
10410 strcpy (p
, section_desc
);
10411 p
+= strlen (section_desc
);
10414 else if (ISALNUM (*q
))
/* No period anywhere in the basename: append SECTION_DESC.  */
10418 if (last_period
== 0)
10419 strcpy (p
, section_desc
);
10424 /* Emit profile function. */
/* Emit the RTL call to the profiling routine (mcount) for label number
   LABELNO.  AIX passes the address of a "LP"-prefixed label; Darwin
   calls (a possibly PIC-stubbed) mcount with the caller's address.  */
10427 output_profile_hook (labelno
)
10430 if (DEFAULT_ABI
== ABI_AIX
)
10433 const char *label_name
;
10438 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
10439 STRIP_NAME_ENCODING (label_name
, ggc_strdup (buf
));
10440 fun
= gen_rtx_SYMBOL_REF (Pmode
, label_name
);
10442 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 1,
10445 else if (DEFAULT_ABI
== ABI_DARWIN
)
10447 const char *mcount_name
= RS6000_MCOUNT
;
10448 int caller_addr_regno
= LINK_REGISTER_REGNUM
;
10450 /* Be conservative and always set this, at least for now. */
10451 current_function_uses_pic_offset_table
= 1;
10454 /* For PIC code, set up a stub and collect the caller's address
10455 from r0, which is where the prologue puts it. */
10458 mcount_name
= machopic_stub_name (mcount_name
);
10459 if (current_function_uses_pic_offset_table
)
10460 caller_addr_regno
= 0;
10463 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, mcount_name
),
10465 gen_rtx_REG (Pmode
, caller_addr_regno
), Pmode
);
10469 /* Write function profiler code. */
/* Emit the textual mcount call sequence for label number LABELNO to
   FILE, dispatching on the ABI.  For V.4-style ABIs the address of the
   "LP" label is materialized differently for -fpic, -fPIC and
   non-PIC; AIX/Darwin are handled in output_profile_hook instead.
   NOTE(review): several original lines (case labels, braces) were lost
   in extraction.  */
10472 output_function_profiler (file
, labelno
)
10478 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
10479 switch (DEFAULT_ABI
)
10485 case ABI_AIX_NODESC
:
/* Save LR, then load the LP label's address into r0.  */
10486 fprintf (file
, "\tmflr %s\n", reg_names
[0]);
/* -fpic: fetch the label address via the GOT (r12).  */
10489 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file
);
10490 asm_fprintf (file
, "\t{st|stw} %s,4(%s)\n",
10491 reg_names
[0], reg_names
[1]);
10492 asm_fprintf (file
, "\tmflr %s\n", reg_names
[12]);
10493 asm_fprintf (file
, "\t{l|lwz} %s,", reg_names
[0]);
10494 assemble_name (file
, buf
);
10495 asm_fprintf (file
, "@got(%s)\n", reg_names
[12]);
/* -fPIC: compute the label address pc-relatively via an inline
   literal and a bl/mflr pair.  */
10497 else if (flag_pic
> 1)
10499 asm_fprintf (file
, "\t{st|stw} %s,4(%s)\n",
10500 reg_names
[0], reg_names
[1]);
10501 /* Now, we need to get the address of the label. */
10502 fputs ("\tbl 1f\n\t.long ", file
);
10503 assemble_name (file
, buf
);
10504 fputs ("-.\n1:", file
);
10505 asm_fprintf (file
, "\tmflr %s\n", reg_names
[11]);
10506 asm_fprintf (file
, "\t{l|lwz} %s,0(%s)\n",
10507 reg_names
[0], reg_names
[11]);
10508 asm_fprintf (file
, "\t{cax|add} %s,%s,%s\n",
10509 reg_names
[0], reg_names
[0], reg_names
[11]);
/* Non-PIC: load the label address with lis/la via r12.  */
10513 asm_fprintf (file
, "\t{liu|lis} %s,", reg_names
[12]);
10514 assemble_name (file
, buf
);
10515 fputs ("@ha\n", file
);
10516 asm_fprintf (file
, "\t{st|stw} %s,4(%s)\n",
10517 reg_names
[0], reg_names
[1]);
10518 asm_fprintf (file
, "\t{cal|la} %s,", reg_names
[0]);
10519 assemble_name (file
, buf
);
10520 asm_fprintf (file
, "@l(%s)\n", reg_names
[12]);
/* Preserve the static chain register around the mcount call by
   parking it in r30.  */
10523 if (current_function_needs_context
)
10524 asm_fprintf (file
, "\tmr %s,%s\n",
10525 reg_names
[30], reg_names
[STATIC_CHAIN_REGNUM
]);
10526 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
10527 if (current_function_needs_context
)
10528 asm_fprintf (file
, "\tmr %s,%s\n",
10529 reg_names
[STATIC_CHAIN_REGNUM
], reg_names
[30]);
10534 /* Don't do anything, done in output_profile_hook (). */
10540 /* Adjust the cost of a scheduling dependency. Return the new cost of
10541 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
10544 rs6000_adjust_cost (insn
, link
, dep_insn
, cost
)
10547 rtx dep_insn ATTRIBUTE_UNUSED
;
10550 if (! recog_memoized (insn
))
10553 if (REG_NOTE_KIND (link
) != 0)
10556 if (REG_NOTE_KIND (link
) == 0)
10558 /* Data dependency; DEP_INSN writes a register that INSN reads
10559 some cycles later. */
10560 switch (get_attr_type (insn
))
10563 /* Tell the first scheduling pass about the latency between
10564 a mtctr and bctr (and mtlr and br/blr). The first
10565 scheduling pass will not know about this latency since
10566 the mtctr instruction, which has the latency associated
10567 to it, will be generated by reload. */
10568 return TARGET_POWER
? 5 : 4;
10570 /* Leave some extra cycles between a compare and its
10571 dependent branch, to inhibit expensive mispredicts. */
10572 if ((rs6000_cpu_attr
== CPU_PPC750
10573 || rs6000_cpu_attr
== CPU_PPC7400
10574 || rs6000_cpu_attr
== CPU_PPC7450
)
10575 && recog_memoized (dep_insn
)
10576 && (INSN_CODE (dep_insn
) >= 0)
10577 && (get_attr_type (dep_insn
) == TYPE_COMPARE
10578 || get_attr_type (dep_insn
) == TYPE_DELAYED_COMPARE
10579 || get_attr_type (dep_insn
) == TYPE_FPCOMPARE
10580 || get_attr_type (dep_insn
) == TYPE_CR_LOGICAL
))
10585 /* Fall out to return default cost. */
10591 /* A C statement (sans semicolon) to update the integer scheduling
10592 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
10593 INSN earlier, increase the priority to execute INSN later. Do not
10594 define this macro if you do not need to adjust the scheduling
10595 priorities of insns. */
10598 rs6000_adjust_priority (insn
, priority
)
10599 rtx insn ATTRIBUTE_UNUSED
;
10602 /* On machines (like the 750) which have asymmetric integer units,
10603 where one integer unit can do multiply and divides and the other
10604 can't, reduce the priority of multiply/divide so it is scheduled
10605 before other integer operations. */
10608 if (! INSN_P (insn
))
10611 if (GET_CODE (PATTERN (insn
)) == USE
)
10614 switch (rs6000_cpu_attr
) {
10616 switch (get_attr_type (insn
))
10623 fprintf (stderr
, "priority was %#x (%d) before adjustment\n",
10624 priority
, priority
);
10625 if (priority
>= 0 && priority
< 0x01000000)
10635 /* Return how many instructions the machine can issue per cycle. */
10638 rs6000_issue_rate ()
10640 switch (rs6000_cpu_attr
) {
10641 case CPU_RIOS1
: /* ? */
10643 case CPU_PPC601
: /* ? */
10662 /* Length in units of the trampoline for entering a nested function. */
10665 rs6000_trampoline_size ()
10669 switch (DEFAULT_ABI
)
10675 ret
= (TARGET_32BIT
) ? 12 : 24;
10680 case ABI_AIX_NODESC
:
10681 ret
= (TARGET_32BIT
) ? 40 : 48;
10688 /* Emit RTL insns to initialize the variable parts of a trampoline.
10689 FNADDR is an RTX for the address of the function's pure code.
10690 CXT is an RTX for the static chain value for the function. */
10693 rs6000_initialize_trampoline (addr
, fnaddr
, cxt
)
10698 enum machine_mode pmode
= Pmode
;
10699 int regsize
= (TARGET_32BIT
) ? 4 : 8;
10700 rtx ctx_reg
= force_reg (pmode
, cxt
);
10702 switch (DEFAULT_ABI
)
10707 /* Macros to shorten the code expansions below. */
10708 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
10709 #define MEM_PLUS(addr,offset) \
10710 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
10712 /* Under AIX, just build the 3 word function descriptor */
10715 rtx fn_reg
= gen_reg_rtx (pmode
);
10716 rtx toc_reg
= gen_reg_rtx (pmode
);
10717 emit_move_insn (fn_reg
, MEM_DEREF (fnaddr
));
10718 emit_move_insn (toc_reg
, MEM_PLUS (fnaddr
, regsize
));
10719 emit_move_insn (MEM_DEREF (addr
), fn_reg
);
10720 emit_move_insn (MEM_PLUS (addr
, regsize
), toc_reg
);
10721 emit_move_insn (MEM_PLUS (addr
, 2*regsize
), ctx_reg
);
10725 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
10728 case ABI_AIX_NODESC
:
10729 emit_library_call (gen_rtx_SYMBOL_REF (SImode
, "__trampoline_setup"),
10730 FALSE
, VOIDmode
, 4,
10732 GEN_INT (rs6000_trampoline_size ()), SImode
,
10742 /* Table of valid machine attributes. */
10744 const struct attribute_spec rs6000_attribute_table
[] =
10746 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
10747 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
10748 { NULL
, 0, 0, false, false, false, NULL
}
10751 /* Handle a "longcall" attribute; arguments as in struct
10752 attribute_spec.handler. */
10755 rs6000_handle_longcall_attribute (node
, name
, args
, flags
, no_add_attrs
)
10758 tree args ATTRIBUTE_UNUSED
;
10759 int flags ATTRIBUTE_UNUSED
;
10760 bool *no_add_attrs
;
10762 if (TREE_CODE (*node
) != FUNCTION_TYPE
10763 && TREE_CODE (*node
) != FIELD_DECL
10764 && TREE_CODE (*node
) != TYPE_DECL
)
10766 warning ("`%s' attribute only applies to functions",
10767 IDENTIFIER_POINTER (name
));
10768 *no_add_attrs
= true;
10774 /* Return a reference suitable for calling a function with the
10775 longcall attribute. */
10778 rs6000_longcall_ref (call_ref
)
10781 const char *call_name
;
10784 if (GET_CODE (call_ref
) != SYMBOL_REF
)
10787 /* System V adds '.' to the internal name, so skip them. */
10788 call_name
= XSTR (call_ref
, 0);
10789 if (*call_name
== '.')
10791 while (*call_name
== '.')
10794 node
= get_identifier (call_name
);
10795 call_ref
= gen_rtx_SYMBOL_REF (VOIDmode
, IDENTIFIER_POINTER (node
));
10798 return force_reg (Pmode
, call_ref
);
10802 /* A C statement or statements to switch to the appropriate section
10803 for output of RTX in mode MODE. You can assume that RTX is some
10804 kind of constant in RTL. The argument MODE is redundant except in
10805 the case of a `const_int' rtx. Select the section by calling
10806 `text_section' or one of the alternatives for other sections.
10808 Do not define this macro if you put all constants in the read-only
10811 #ifdef USING_ELFOS_H
10814 rs6000_select_rtx_section (mode
, x
)
10815 enum machine_mode mode
;
10818 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
10824 /* A C statement or statements to switch to the appropriate
10825 section for output of DECL. DECL is either a `VAR_DECL' node
10826 or a constant of some sort. RELOC indicates whether forming
10827 the initial value of DECL requires link-time relocations. */
10830 rs6000_select_section (decl
, reloc
)
10834 int size
= int_size_in_bytes (TREE_TYPE (decl
));
10837 static void (* const sec_funcs
[4]) PARAMS ((void)) = {
10844 needs_sdata
= (size
> 0
10845 && size
<= g_switch_value
10846 && rs6000_sdata
!= SDATA_NONE
10847 && (rs6000_sdata
!= SDATA_DATA
|| TREE_PUBLIC (decl
)));
10849 if (TREE_CODE (decl
) == STRING_CST
)
10850 readonly
= ! flag_writable_strings
;
10851 else if (TREE_CODE (decl
) == VAR_DECL
)
10852 readonly
= (! (flag_pic
&& reloc
)
10853 && TREE_READONLY (decl
)
10854 && ! TREE_SIDE_EFFECTS (decl
)
10855 && DECL_INITIAL (decl
)
10856 && DECL_INITIAL (decl
) != error_mark_node
10857 && TREE_CONSTANT (DECL_INITIAL (decl
)));
10858 else if (TREE_CODE (decl
) == CONSTRUCTOR
)
10859 readonly
= (! (flag_pic
&& reloc
)
10860 && ! TREE_SIDE_EFFECTS (decl
)
10861 && TREE_CONSTANT (decl
));
10864 if (needs_sdata
&& rs6000_sdata
!= SDATA_EABI
)
10867 (*sec_funcs
[(readonly
? 0 : 2) + (needs_sdata
? 1 : 0)])();
10870 /* A C statement to build up a unique section name, expressed as a
10871 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
10872 RELOC indicates whether the initial value of EXP requires
10873 link-time relocations. If you do not define this macro, GCC will use
10874 the symbol name prefixed by `.' as the section name. Note - this
10875 macro can now be called for uninitialized data items as well as
10876 initialised data and functions. */
10879 rs6000_unique_section (decl
, reloc
)
10887 const char *prefix
;
10889 static const char *const prefixes
[7][2] =
10891 { ".rodata.", ".gnu.linkonce.r." },
10892 { ".sdata2.", ".gnu.linkonce.s2." },
10893 { ".data.", ".gnu.linkonce.d." },
10894 { ".sdata.", ".gnu.linkonce.s." },
10895 { ".bss.", ".gnu.linkonce.b." },
10896 { ".sbss.", ".gnu.linkonce.sb." },
10897 { ".text.", ".gnu.linkonce.t." }
10900 if (TREE_CODE (decl
) == FUNCTION_DECL
)
10909 if (TREE_CODE (decl
) == STRING_CST
)
10910 readonly
= ! flag_writable_strings
;
10911 else if (TREE_CODE (decl
) == VAR_DECL
)
10912 readonly
= (! (flag_pic
&& reloc
)
10913 && TREE_READONLY (decl
)
10914 && ! TREE_SIDE_EFFECTS (decl
)
10915 && TREE_CONSTANT (DECL_INITIAL (decl
)));
10917 size
= int_size_in_bytes (TREE_TYPE (decl
));
10918 needs_sdata
= (size
> 0
10919 && size
<= g_switch_value
10920 && rs6000_sdata
!= SDATA_NONE
10921 && (rs6000_sdata
!= SDATA_DATA
|| TREE_PUBLIC (decl
)));
10923 if (DECL_INITIAL (decl
) == 0
10924 || DECL_INITIAL (decl
) == error_mark_node
)
10926 else if (! readonly
)
10933 /* .sdata2 is only for EABI. */
10934 if (sec
== 0 && rs6000_sdata
!= SDATA_EABI
)
10940 STRIP_NAME_ENCODING (name
, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
)));
10941 prefix
= prefixes
[sec
][DECL_ONE_ONLY (decl
)];
10942 len
= strlen (name
) + strlen (prefix
);
10943 string
= alloca (len
+ 1);
10945 sprintf (string
, "%s%s", prefix
, name
);
10947 DECL_SECTION_NAME (decl
) = build_string (len
, string
);
10951 /* If we are referencing a function that is static or is known to be
10952 in this file, make the SYMBOL_REF special. We can use this to indicate
10953 that we can branch to this function without emitting a no-op after the
10954 call. For real AIX calling sequences, we also replace the
10955 function name with the real name (1 or 2 leading .'s), rather than
10956 the function descriptor name. This saves a lot of overriding code
10957 to read the prefixes. */
10960 rs6000_encode_section_info (decl
)
10963 if (TREE_CODE (decl
) == FUNCTION_DECL
)
10965 rtx sym_ref
= XEXP (DECL_RTL (decl
), 0);
10966 if ((TREE_ASM_WRITTEN (decl
) || ! TREE_PUBLIC (decl
))
10967 && ! DECL_WEAK (decl
))
10968 SYMBOL_REF_FLAG (sym_ref
) = 1;
10970 if (DEFAULT_ABI
== ABI_AIX
)
10972 size_t len1
= (DEFAULT_ABI
== ABI_AIX
) ? 1 : 2;
10973 size_t len2
= strlen (XSTR (sym_ref
, 0));
10974 char *str
= alloca (len1
+ len2
+ 1);
10977 memcpy (str
+ len1
, XSTR (sym_ref
, 0), len2
+ 1);
10979 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len1
+ len2
);
10982 else if (rs6000_sdata
!= SDATA_NONE
10983 && DEFAULT_ABI
== ABI_V4
10984 && TREE_CODE (decl
) == VAR_DECL
)
10986 int size
= int_size_in_bytes (TREE_TYPE (decl
));
10987 tree section_name
= DECL_SECTION_NAME (decl
);
10988 const char *name
= (char *)0;
10993 if (TREE_CODE (section_name
) == STRING_CST
)
10995 name
= TREE_STRING_POINTER (section_name
);
10996 len
= TREE_STRING_LENGTH (section_name
);
11002 if ((size
> 0 && size
<= g_switch_value
)
11004 && ((len
== sizeof (".sdata") - 1
11005 && strcmp (name
, ".sdata") == 0)
11006 || (len
== sizeof (".sdata2") - 1
11007 && strcmp (name
, ".sdata2") == 0)
11008 || (len
== sizeof (".sbss") - 1
11009 && strcmp (name
, ".sbss") == 0)
11010 || (len
== sizeof (".sbss2") - 1
11011 && strcmp (name
, ".sbss2") == 0)
11012 || (len
== sizeof (".PPC.EMB.sdata0") - 1
11013 && strcmp (name
, ".PPC.EMB.sdata0") == 0)
11014 || (len
== sizeof (".PPC.EMB.sbss0") - 1
11015 && strcmp (name
, ".PPC.EMB.sbss0") == 0))))
11017 rtx sym_ref
= XEXP (DECL_RTL (decl
), 0);
11018 size_t len
= strlen (XSTR (sym_ref
, 0));
11019 char *str
= alloca (len
+ 2);
11022 memcpy (str
+ 1, XSTR (sym_ref
, 0), len
+ 1);
11023 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len
+ 1);
11028 #endif /* USING_ELFOS_H */
11031 /* Return a REG that occurs in ADDR with coefficient 1.
11032 ADDR can be effectively incremented by incrementing REG.
11034 r0 is special and we must not select it as an address
11035 register by this routine since our caller will try to
11036 increment the returned register via an "la" instruction. */
11039 find_addr_reg (addr
)
11042 while (GET_CODE (addr
) == PLUS
)
11044 if (GET_CODE (XEXP (addr
, 0)) == REG
11045 && REGNO (XEXP (addr
, 0)) != 0)
11046 addr
= XEXP (addr
, 0);
11047 else if (GET_CODE (XEXP (addr
, 1)) == REG
11048 && REGNO (XEXP (addr
, 1)) != 0)
11049 addr
= XEXP (addr
, 1);
11050 else if (CONSTANT_P (XEXP (addr
, 0)))
11051 addr
= XEXP (addr
, 1);
11052 else if (CONSTANT_P (XEXP (addr
, 1)))
11053 addr
= XEXP (addr
, 0);
11057 if (GET_CODE (addr
) == REG
&& REGNO (addr
) != 0)
11063 rs6000_fatal_bad_address (op
)
11066 fatal_insn ("bad address", op
);
11069 /* Called to register all of our global variables with the garbage
11073 rs6000_add_gc_roots ()
11075 ggc_add_rtx_root (&rs6000_compare_op0
, 1);
11076 ggc_add_rtx_root (&rs6000_compare_op1
, 1);
11078 toc_hash_table
= htab_create (1021, toc_hash_function
, toc_hash_eq
, NULL
);
11079 ggc_add_root (&toc_hash_table
, 1, sizeof (toc_hash_table
),
11080 toc_hash_mark_table
);
11083 machopic_add_gc_roots ();
11090 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
11091 reference and a constant. */
11094 symbolic_operand (op
)
11097 switch (GET_CODE (op
))
11104 return (GET_CODE (op
) == SYMBOL_REF
||
11105 (GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
11106 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
11107 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
11114 #ifdef RS6000_LONG_BRANCH
11116 static tree stub_list
= 0;
11118 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
11119 procedure calls to the linked list. */
11122 add_compiler_stub (label_name
, function_name
, line_number
)
11124 tree function_name
;
11127 tree stub
= build_tree_list (function_name
, label_name
);
11128 TREE_TYPE (stub
) = build_int_2 (line_number
, 0);
11129 TREE_CHAIN (stub
) = stub_list
;
11133 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
11134 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
11135 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
11137 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
11138 handling procedure calls from the linked list and initializes the
11142 output_compiler_stub ()
11145 char label_buf
[256];
11147 tree tmp_stub
, stub
;
11150 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
11152 fprintf (asm_out_file
,
11153 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub
)));
11155 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11156 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
11157 fprintf (asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub
));
11158 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11160 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))[0] == '*')
11162 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
))+1);
11165 label_buf
[0] = '_';
11166 strcpy (label_buf
+1,
11167 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub
)));
11170 strcpy (tmp_buf
, "lis r12,hi16(");
11171 strcat (tmp_buf
, label_buf
);
11172 strcat (tmp_buf
, ")\n\tori r12,r12,lo16(");
11173 strcat (tmp_buf
, label_buf
);
11174 strcat (tmp_buf
, ")\n\tmtctr r12\n\tbctr");
11175 output_asm_insn (tmp_buf
, 0);
11177 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11178 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
11179 fprintf(asm_out_file
, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub
));
11180 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11186 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
11187 already there or not. */
11190 no_previous_def (function_name
)
11191 tree function_name
;
11194 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
11195 if (function_name
== STUB_FUNCTION_NAME (stub
))
11200 /* GET_PREV_LABEL gets the label name from the previous definition of
11204 get_prev_label (function_name
)
11205 tree function_name
;
11208 for (stub
= stub_list
; stub
; stub
= TREE_CHAIN (stub
))
11209 if (function_name
== STUB_FUNCTION_NAME (stub
))
11210 return STUB_LABEL_NAME (stub
);
11214 /* INSN is either a function call or a millicode call. It may have an
11215 unconditional jump in its delay slot.
11217 CALL_DEST is the routine we are calling. */
11220 output_call (insn
, call_dest
, operand_number
)
11223 int operand_number
;
11225 static char buf
[256];
11226 if (GET_CODE (call_dest
) == SYMBOL_REF
&& TARGET_LONG_BRANCH
&& !flag_pic
)
11229 tree funname
= get_identifier (XSTR (call_dest
, 0));
11231 if (no_previous_def (funname
))
11234 rtx label_rtx
= gen_label_rtx ();
11235 char *label_buf
, temp_buf
[256];
11236 ASM_GENERATE_INTERNAL_LABEL (temp_buf
, "L",
11237 CODE_LABEL_NUMBER (label_rtx
));
11238 label_buf
= temp_buf
[0] == '*' ? temp_buf
+ 1 : temp_buf
;
11239 labelname
= get_identifier (label_buf
);
11240 for (; insn
&& GET_CODE (insn
) != NOTE
; insn
= PREV_INSN (insn
));
11242 line_number
= NOTE_LINE_NUMBER (insn
);
11243 add_compiler_stub (labelname
, funname
, line_number
);
11246 labelname
= get_prev_label (funname
);
11248 sprintf (buf
, "jbsr %%z%d,%.246s",
11249 operand_number
, IDENTIFIER_POINTER (labelname
));
11254 sprintf (buf
, "bl %%z%d", operand_number
);
11259 #endif /* RS6000_LONG_BRANCH */
11261 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
11263 const char *const symbol_ = (SYMBOL); \
11264 char *buffer_ = (BUF); \
11265 if (symbol_[0] == '"') \
11267 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
11269 else if (name_needs_quotes(symbol_)) \
11271 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
11275 sprintf(buffer_, "L%d$%s", (N), symbol_); \
11280 /* Generate PIC and indirect symbol stubs. */
11283 machopic_output_stub (file
, symb
, stub
)
11285 const char *symb
, *stub
;
11287 unsigned int length
;
11288 char *symbol_name
, *lazy_ptr_name
;
11289 char *local_label_0
;
11290 static int label
= 0;
11292 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
11293 STRIP_NAME_ENCODING (symb
, symb
);
11297 length
= strlen (symb
);
11298 symbol_name
= alloca (length
+ 32);
11299 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name
, symb
, length
);
11301 lazy_ptr_name
= alloca (length
+ 32);
11302 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name
, symb
, length
);
11304 local_label_0
= alloca (length
+ 32);
11305 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0
, symb
, length
, 0);
11308 machopic_picsymbol_stub_section ();
11310 machopic_symbol_stub_section ();
11312 fprintf (file
, "%s:\n", stub
);
11313 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
11317 fprintf (file
, "\tmflr r0\n");
11318 fprintf (file
, "\tbcl 20,31,%s\n", local_label_0
);
11319 fprintf (file
, "%s:\n\tmflr r11\n", local_label_0
);
11320 fprintf (file
, "\taddis r11,r11,ha16(%s-%s)\n",
11321 lazy_ptr_name
, local_label_0
);
11322 fprintf (file
, "\tmtlr r0\n");
11323 fprintf (file
, "\tlwz r12,lo16(%s-%s)(r11)\n",
11324 lazy_ptr_name
, local_label_0
);
11325 fprintf (file
, "\tmtctr r12\n");
11326 fprintf (file
, "\taddi r11,r11,lo16(%s-%s)\n",
11327 lazy_ptr_name
, local_label_0
);
11328 fprintf (file
, "\tbctr\n");
11331 fprintf (file
, "non-pure not supported\n");
11333 machopic_lazy_symbol_ptr_section ();
11334 fprintf (file
, "%s:\n", lazy_ptr_name
);
11335 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
11336 fprintf (file
, "\t.long dyld_stub_binding_helper\n");
11339 /* Legitimize PIC addresses. If the address is already
11340 position-independent, we return ORIG. Newly generated
11341 position-independent addresses go into a reg. This is REG if non
11342 zero, otherwise we allocate register(s) as necessary. */
11344 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
11347 rs6000_machopic_legitimize_pic_address (orig
, mode
, reg
)
11349 enum machine_mode mode
;
11354 if (reg
== NULL
&& ! reload_in_progress
&& ! reload_completed
)
11355 reg
= gen_reg_rtx (Pmode
);
11357 if (GET_CODE (orig
) == CONST
)
11359 if (GET_CODE (XEXP (orig
, 0)) == PLUS
11360 && XEXP (XEXP (orig
, 0), 0) == pic_offset_table_rtx
)
11363 if (GET_CODE (XEXP (orig
, 0)) == PLUS
)
11366 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 0),
11369 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 1),
11375 if (GET_CODE (offset
) == CONST_INT
)
11377 if (SMALL_INT (offset
))
11378 return plus_constant (base
, INTVAL (offset
));
11379 else if (! reload_in_progress
&& ! reload_completed
)
11380 offset
= force_reg (Pmode
, offset
);
11383 rtx mem
= force_const_mem (Pmode
, orig
);
11384 return machopic_legitimize_pic_address (mem
, Pmode
, reg
);
11387 return gen_rtx (PLUS
, Pmode
, base
, offset
);
11390 /* Fall back on generic machopic code. */
11391 return machopic_legitimize_pic_address (orig
, mode
, reg
);
11394 /* This is just a placeholder to make linking work without having to
11395 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
11396 ever needed for Darwin (not too likely!) this would have to get a
11397 real definition. */
11404 #endif /* TARGET_MACHO */
11407 static unsigned int
11408 rs6000_elf_section_type_flags (decl
, name
, reloc
)
11413 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
11415 if (TARGET_RELOCATABLE
)
11416 flags
|= SECTION_WRITE
;
11421 /* Record an element in the table of global constructors. SYMBOL is
11422 a SYMBOL_REF of the function to be called; PRIORITY is a number
11423 between 0 and MAX_INIT_PRIORITY.
11425 This differs from default_named_section_asm_out_constructor in
11426 that we have special handling for -mrelocatable. */
11429 rs6000_elf_asm_out_constructor (symbol
, priority
)
11433 const char *section
= ".ctors";
11436 if (priority
!= DEFAULT_INIT_PRIORITY
)
11438 sprintf (buf
, ".ctors.%.5u",
11439 /* Invert the numbering so the linker puts us in the proper
11440 order; constructors are run from right to left, and the
11441 linker sorts in increasing order. */
11442 MAX_INIT_PRIORITY
- priority
);
11446 named_section_flags (section
, SECTION_WRITE
);
11447 assemble_align (POINTER_SIZE
);
11449 if (TARGET_RELOCATABLE
)
11451 fputs ("\t.long (", asm_out_file
);
11452 output_addr_const (asm_out_file
, symbol
);
11453 fputs (")@fixup\n", asm_out_file
);
11456 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
11460 rs6000_elf_asm_out_destructor (symbol
, priority
)
11464 const char *section
= ".dtors";
11467 if (priority
!= DEFAULT_INIT_PRIORITY
)
11469 sprintf (buf
, ".dtors.%.5u",
11470 /* Invert the numbering so the linker puts us in the proper
11471 order; constructors are run from right to left, and the
11472 linker sorts in increasing order. */
11473 MAX_INIT_PRIORITY
- priority
);
11477 named_section_flags (section
, SECTION_WRITE
);
11478 assemble_align (POINTER_SIZE
);
11480 if (TARGET_RELOCATABLE
)
11482 fputs ("\t.long (", asm_out_file
);
11483 output_addr_const (asm_out_file
, symbol
);
11484 fputs (")@fixup\n", asm_out_file
);
11487 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
11491 #ifdef OBJECT_FORMAT_COFF
11493 xcoff_asm_named_section (name
, flags
)
11495 unsigned int flags ATTRIBUTE_UNUSED
;
11497 fprintf (asm_out_file
, "\t.csect %s\n", name
);