1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston,
21 MA 02110-1301, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
55 #include "tree-gimple.h"
58 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
61 #include "gstab.h" /* for N_SLINE */
64 #ifndef TARGET_NO_PROTOTYPE
65 #define TARGET_NO_PROTOTYPE 0
/* Classic min/max macros.  NOTE: each argument is evaluated twice, so
   callers must not pass expressions with side effects (e.g. i++).  */
68 #define min(A,B) ((A) < (B) ? (A) : (B))
69 #define max(A,B) ((A) > (B) ? (A) : (B))
71 /* Structure used to define the rs6000 stack */
72 typedef struct rs6000_stack
{
73 int first_gp_reg_save
; /* first callee saved GP register used */
74 int first_fp_reg_save
; /* first callee saved FP register used */
75 int first_altivec_reg_save
; /* first callee saved AltiVec register used */
76 int lr_save_p
; /* true if the link reg needs to be saved */
77 int cr_save_p
; /* true if the CR reg needs to be saved */
78 unsigned int vrsave_mask
; /* mask of vec registers to save */
79 int toc_save_p
; /* true if the TOC needs to be saved */
80 int push_p
; /* true if we need to allocate stack space */
81 int calls_p
; /* true if the function makes any calls */
82 int world_save_p
; /* true if we're saving *everything*:
83 r13-r31, cr, f14-f31, vrsave, v20-v31 */
84 enum rs6000_abi abi
; /* which ABI to use */
85 int gp_save_offset
; /* offset to save GP regs from initial SP */
86 int fp_save_offset
; /* offset to save FP regs from initial SP */
87 int altivec_save_offset
; /* offset to save AltiVec regs from initial SP */
88 int lr_save_offset
; /* offset to save LR from initial SP */
89 int cr_save_offset
; /* offset to save CR from initial SP */
90 int vrsave_save_offset
; /* offset to save VRSAVE from initial SP */
91 int spe_gp_save_offset
; /* offset to save spe 64-bit gprs */
92 int toc_save_offset
; /* offset to save the TOC pointer */
93 int varargs_save_offset
; /* offset to save the varargs registers */
94 int ehrd_offset
; /* offset to EH return data */
95 int reg_size
; /* register size (4 or 8) */
96 HOST_WIDE_INT vars_size
; /* variable save area size */
97 int parm_size
; /* outgoing parameter size */
98 int save_size
; /* save area size */
99 int fixed_size
; /* fixed size of stack frame */
100 int gp_size
; /* size of saved GP registers */
101 int fp_size
; /* size of saved FP registers */
102 int altivec_size
; /* size of saved AltiVec registers */
103 int cr_size
; /* size to hold CR if not in save_size */
104 int lr_size
; /* size to hold LR if not in save_size */
105 int vrsave_size
; /* size to hold VRSAVE if not in save_size */
106 int altivec_padding_size
; /* size of altivec alignment padding */
108 int spe_gp_size
; /* size of 64-bit GPR save size for SPE */
109 int spe_padding_size
;
110 int toc_size
; /* size to hold TOC if not in save_size */
111 HOST_WIDE_INT total_size
; /* total bytes allocated for stack */
112 int spe_64bit_regs_used
;
115 /* A C structure for machine-specific, per-function data.
116 This is added to the cfun structure. */
117 typedef struct machine_function
GTY(())
119 /* Flags if __builtin_return_address (n) with n >= 1 was used. */
120 int ra_needs_full_frame
;
121 /* Some local-dynamic symbol. */
122 const char *some_ld_name
;
123 /* Whether the instruction chain has been scanned already. */
124 int insn_chain_scanned_p
;
125 /* Flags if __builtin_return_address (0) was used. */
127 /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
128 varargs save area. */
129 HOST_WIDE_INT varargs_save_offset
;
132 /* Target cpu type */
134 enum processor_type rs6000_cpu
;
135 struct rs6000_cpu_select rs6000_select
[3] =
137 /* switch name, tune arch */
138 { (const char *)0, "--with-cpu=", 1, 1 },
139 { (const char *)0, "-mcpu=", 1, 1 },
140 { (const char *)0, "-mtune=", 1, 0 },
143 /* Always emit branch hint bits. */
144 static GTY(()) bool rs6000_always_hint
;
146 /* Schedule instructions for group formation. */
147 static GTY(()) bool rs6000_sched_groups
;
149 /* Support for -msched-costly-dep option. */
150 const char *rs6000_sched_costly_dep_str
;
151 enum rs6000_dependence_cost rs6000_sched_costly_dep
;
153 /* Support for -minsert-sched-nops option. */
154 const char *rs6000_sched_insert_nops_str
;
155 enum rs6000_nop_insertion rs6000_sched_insert_nops
;
157 /* Support targetm.vectorize.builtin_mask_for_load. */
158 static GTY(()) tree altivec_builtin_mask_for_load
;
160 /* Size of long double */
161 int rs6000_long_double_type_size
;
163 /* Whether -mabi=altivec has appeared */
164 int rs6000_altivec_abi
;
166 /* Nonzero if we want SPE ABI extensions. */
169 /* Nonzero if floating point operations are done in the GPRs. */
170 int rs6000_float_gprs
= 0;
172 /* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
173 int rs6000_darwin64_abi
;
175 /* Set to nonzero once AIX common-mode calls have been defined. */
176 static GTY(()) int common_mode_defined
;
178 /* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
180 rtx rs6000_compare_op0
, rs6000_compare_op1
;
181 int rs6000_compare_fp_p
;
183 /* Label number of label created for -mrelocatable, to call to so we can
184 get the address of the GOT section */
185 int rs6000_pic_labelno
;
188 /* Which abi to adhere to */
189 const char *rs6000_abi_name
;
191 /* Semantics of the small data area */
192 enum rs6000_sdata_type rs6000_sdata
= SDATA_DATA
;
194 /* Which small data model to use */
195 const char *rs6000_sdata_name
= (char *)0;
197 /* Counter for labels which are to be placed in .fixup. */
198 int fixuplabelno
= 0;
201 /* Bit size of immediate TLS offsets and string from which it is decoded. */
202 int rs6000_tls_size
= 32;
203 const char *rs6000_tls_size_string
;
205 /* ABI enumeration available for subtarget to use. */
206 enum rs6000_abi rs6000_current_abi
;
208 /* Whether to use variant of AIX ABI for PowerPC64 Linux. */
212 const char *rs6000_debug_name
;
213 int rs6000_debug_stack
; /* debug stack applications */
214 int rs6000_debug_arg
; /* debug argument handling */
216 /* Value is TRUE if register/mode pair is acceptable. */
217 bool rs6000_hard_regno_mode_ok_p
[NUM_MACHINE_MODES
][FIRST_PSEUDO_REGISTER
];
219 /* Built in types. */
221 tree rs6000_builtin_types
[RS6000_BTI_MAX
];
222 tree rs6000_builtin_decls
[RS6000_BUILTIN_COUNT
];
224 const char *rs6000_traceback_name
;
226 traceback_default
= 0,
232 /* Flag to say the TOC is initialized */
234 char toc_label_name
[10];
236 /* Alias set for saves and restores from the rs6000 stack. */
237 static GTY(()) int rs6000_sr_alias_set
;
239 /* Control alignment for fields within structures. */
240 /* String from -malign-XXXXX. */
241 int rs6000_alignment_flags
;
243 /* True for any options that were explicitly set. */
245 bool aix_struct_ret
; /* True if -maix-struct-ret was used. */
246 bool alignment
; /* True if -malign- was used. */
247 bool abi
; /* True if -mabi= was used. */
248 bool spe
; /* True if -mspe= was used. */
249 bool float_gprs
; /* True if -mfloat-gprs= was used. */
250 bool isel
; /* True if -misel was used. */
251 bool long_double
; /* True if -mlong-double- was used. */
252 } rs6000_explicit_options
;
254 struct builtin_description
256 /* mask is not const because we're going to alter it below. This
257 nonsense will go away when we rewrite the -march infrastructure
258 to give us more target flag bits. */
260 const enum insn_code icode
;
261 const char *const name
;
262 const enum rs6000_builtins code
;
265 /* Target cpu costs. */
267 struct processor_costs
{
268 const int mulsi
; /* cost of SImode multiplication. */
269 const int mulsi_const
; /* cost of SImode multiplication by constant. */
270 const int mulsi_const9
; /* cost of SImode mult by short constant. */
271 const int muldi
; /* cost of DImode multiplication. */
272 const int divsi
; /* cost of SImode division. */
273 const int divdi
; /* cost of DImode division. */
274 const int fp
; /* cost of simple SFmode and DFmode insns. */
275 const int dmul
; /* cost of DFmode multiplication (and fmadd). */
276 const int sdiv
; /* cost of SFmode division (fdivs). */
277 const int ddiv
; /* cost of DFmode division (fdiv). */
280 const struct processor_costs
*rs6000_cost
;
282 /* Processor costs (relative to an add) */
284 /* Instruction size costs on 32bit processors. */
286 struct processor_costs size32_cost
= {
287 COSTS_N_INSNS (1), /* mulsi */
288 COSTS_N_INSNS (1), /* mulsi_const */
289 COSTS_N_INSNS (1), /* mulsi_const9 */
290 COSTS_N_INSNS (1), /* muldi */
291 COSTS_N_INSNS (1), /* divsi */
292 COSTS_N_INSNS (1), /* divdi */
293 COSTS_N_INSNS (1), /* fp */
294 COSTS_N_INSNS (1), /* dmul */
295 COSTS_N_INSNS (1), /* sdiv */
296 COSTS_N_INSNS (1), /* ddiv */
299 /* Instruction size costs on 64bit processors. */
301 struct processor_costs size64_cost
= {
302 COSTS_N_INSNS (1), /* mulsi */
303 COSTS_N_INSNS (1), /* mulsi_const */
304 COSTS_N_INSNS (1), /* mulsi_const9 */
305 COSTS_N_INSNS (1), /* muldi */
306 COSTS_N_INSNS (1), /* divsi */
307 COSTS_N_INSNS (1), /* divdi */
308 COSTS_N_INSNS (1), /* fp */
309 COSTS_N_INSNS (1), /* dmul */
310 COSTS_N_INSNS (1), /* sdiv */
311 COSTS_N_INSNS (1), /* ddiv */
314 /* Instruction costs on RIOS1 processors. */
316 struct processor_costs rios1_cost
= {
317 COSTS_N_INSNS (5), /* mulsi */
318 COSTS_N_INSNS (4), /* mulsi_const */
319 COSTS_N_INSNS (3), /* mulsi_const9 */
320 COSTS_N_INSNS (5), /* muldi */
321 COSTS_N_INSNS (19), /* divsi */
322 COSTS_N_INSNS (19), /* divdi */
323 COSTS_N_INSNS (2), /* fp */
324 COSTS_N_INSNS (2), /* dmul */
325 COSTS_N_INSNS (19), /* sdiv */
326 COSTS_N_INSNS (19), /* ddiv */
329 /* Instruction costs on RIOS2 processors. */
331 struct processor_costs rios2_cost
= {
332 COSTS_N_INSNS (2), /* mulsi */
333 COSTS_N_INSNS (2), /* mulsi_const */
334 COSTS_N_INSNS (2), /* mulsi_const9 */
335 COSTS_N_INSNS (2), /* muldi */
336 COSTS_N_INSNS (13), /* divsi */
337 COSTS_N_INSNS (13), /* divdi */
338 COSTS_N_INSNS (2), /* fp */
339 COSTS_N_INSNS (2), /* dmul */
340 COSTS_N_INSNS (17), /* sdiv */
341 COSTS_N_INSNS (17), /* ddiv */
344 /* Instruction costs on RS64A processors. */
346 struct processor_costs rs64a_cost
= {
347 COSTS_N_INSNS (20), /* mulsi */
348 COSTS_N_INSNS (12), /* mulsi_const */
349 COSTS_N_INSNS (8), /* mulsi_const9 */
350 COSTS_N_INSNS (34), /* muldi */
351 COSTS_N_INSNS (65), /* divsi */
352 COSTS_N_INSNS (67), /* divdi */
353 COSTS_N_INSNS (4), /* fp */
354 COSTS_N_INSNS (4), /* dmul */
355 COSTS_N_INSNS (31), /* sdiv */
356 COSTS_N_INSNS (31), /* ddiv */
359 /* Instruction costs on MPCCORE processors. */
361 struct processor_costs mpccore_cost
= {
362 COSTS_N_INSNS (2), /* mulsi */
363 COSTS_N_INSNS (2), /* mulsi_const */
364 COSTS_N_INSNS (2), /* mulsi_const9 */
365 COSTS_N_INSNS (2), /* muldi */
366 COSTS_N_INSNS (6), /* divsi */
367 COSTS_N_INSNS (6), /* divdi */
368 COSTS_N_INSNS (4), /* fp */
369 COSTS_N_INSNS (5), /* dmul */
370 COSTS_N_INSNS (10), /* sdiv */
371 COSTS_N_INSNS (17), /* ddiv */
374 /* Instruction costs on PPC403 processors. */
376 struct processor_costs ppc403_cost
= {
377 COSTS_N_INSNS (4), /* mulsi */
378 COSTS_N_INSNS (4), /* mulsi_const */
379 COSTS_N_INSNS (4), /* mulsi_const9 */
380 COSTS_N_INSNS (4), /* muldi */
381 COSTS_N_INSNS (33), /* divsi */
382 COSTS_N_INSNS (33), /* divdi */
383 COSTS_N_INSNS (11), /* fp */
384 COSTS_N_INSNS (11), /* dmul */
385 COSTS_N_INSNS (11), /* sdiv */
386 COSTS_N_INSNS (11), /* ddiv */
389 /* Instruction costs on PPC405 processors. */
391 struct processor_costs ppc405_cost
= {
392 COSTS_N_INSNS (5), /* mulsi */
393 COSTS_N_INSNS (4), /* mulsi_const */
394 COSTS_N_INSNS (3), /* mulsi_const9 */
395 COSTS_N_INSNS (5), /* muldi */
396 COSTS_N_INSNS (35), /* divsi */
397 COSTS_N_INSNS (35), /* divdi */
398 COSTS_N_INSNS (11), /* fp */
399 COSTS_N_INSNS (11), /* dmul */
400 COSTS_N_INSNS (11), /* sdiv */
401 COSTS_N_INSNS (11), /* ddiv */
404 /* Instruction costs on PPC440 processors. */
406 struct processor_costs ppc440_cost
= {
407 COSTS_N_INSNS (3), /* mulsi */
408 COSTS_N_INSNS (2), /* mulsi_const */
409 COSTS_N_INSNS (2), /* mulsi_const9 */
410 COSTS_N_INSNS (3), /* muldi */
411 COSTS_N_INSNS (34), /* divsi */
412 COSTS_N_INSNS (34), /* divdi */
413 COSTS_N_INSNS (5), /* fp */
414 COSTS_N_INSNS (5), /* dmul */
415 COSTS_N_INSNS (19), /* sdiv */
416 COSTS_N_INSNS (33), /* ddiv */
419 /* Instruction costs on PPC601 processors. */
421 struct processor_costs ppc601_cost
= {
422 COSTS_N_INSNS (5), /* mulsi */
423 COSTS_N_INSNS (5), /* mulsi_const */
424 COSTS_N_INSNS (5), /* mulsi_const9 */
425 COSTS_N_INSNS (5), /* muldi */
426 COSTS_N_INSNS (36), /* divsi */
427 COSTS_N_INSNS (36), /* divdi */
428 COSTS_N_INSNS (4), /* fp */
429 COSTS_N_INSNS (5), /* dmul */
430 COSTS_N_INSNS (17), /* sdiv */
431 COSTS_N_INSNS (31), /* ddiv */
434 /* Instruction costs on PPC603 processors. */
436 struct processor_costs ppc603_cost
= {
437 COSTS_N_INSNS (5), /* mulsi */
438 COSTS_N_INSNS (3), /* mulsi_const */
439 COSTS_N_INSNS (2), /* mulsi_const9 */
440 COSTS_N_INSNS (5), /* muldi */
441 COSTS_N_INSNS (37), /* divsi */
442 COSTS_N_INSNS (37), /* divdi */
443 COSTS_N_INSNS (3), /* fp */
444 COSTS_N_INSNS (4), /* dmul */
445 COSTS_N_INSNS (18), /* sdiv */
446 COSTS_N_INSNS (33), /* ddiv */
449 /* Instruction costs on PPC604 processors. */
451 struct processor_costs ppc604_cost
= {
452 COSTS_N_INSNS (4), /* mulsi */
453 COSTS_N_INSNS (4), /* mulsi_const */
454 COSTS_N_INSNS (4), /* mulsi_const9 */
455 COSTS_N_INSNS (4), /* muldi */
456 COSTS_N_INSNS (20), /* divsi */
457 COSTS_N_INSNS (20), /* divdi */
458 COSTS_N_INSNS (3), /* fp */
459 COSTS_N_INSNS (3), /* dmul */
460 COSTS_N_INSNS (18), /* sdiv */
461 COSTS_N_INSNS (32), /* ddiv */
464 /* Instruction costs on PPC604e processors. */
466 struct processor_costs ppc604e_cost
= {
467 COSTS_N_INSNS (2), /* mulsi */
468 COSTS_N_INSNS (2), /* mulsi_const */
469 COSTS_N_INSNS (2), /* mulsi_const9 */
470 COSTS_N_INSNS (2), /* muldi */
471 COSTS_N_INSNS (20), /* divsi */
472 COSTS_N_INSNS (20), /* divdi */
473 COSTS_N_INSNS (3), /* fp */
474 COSTS_N_INSNS (3), /* dmul */
475 COSTS_N_INSNS (18), /* sdiv */
476 COSTS_N_INSNS (32), /* ddiv */
479 /* Instruction costs on PPC620 processors. */
481 struct processor_costs ppc620_cost
= {
482 COSTS_N_INSNS (5), /* mulsi */
483 COSTS_N_INSNS (4), /* mulsi_const */
484 COSTS_N_INSNS (3), /* mulsi_const9 */
485 COSTS_N_INSNS (7), /* muldi */
486 COSTS_N_INSNS (21), /* divsi */
487 COSTS_N_INSNS (37), /* divdi */
488 COSTS_N_INSNS (3), /* fp */
489 COSTS_N_INSNS (3), /* dmul */
490 COSTS_N_INSNS (18), /* sdiv */
491 COSTS_N_INSNS (32), /* ddiv */
494 /* Instruction costs on PPC630 processors. */
496 struct processor_costs ppc630_cost
= {
497 COSTS_N_INSNS (5), /* mulsi */
498 COSTS_N_INSNS (4), /* mulsi_const */
499 COSTS_N_INSNS (3), /* mulsi_const9 */
500 COSTS_N_INSNS (7), /* muldi */
501 COSTS_N_INSNS (21), /* divsi */
502 COSTS_N_INSNS (37), /* divdi */
503 COSTS_N_INSNS (3), /* fp */
504 COSTS_N_INSNS (3), /* dmul */
505 COSTS_N_INSNS (17), /* sdiv */
506 COSTS_N_INSNS (21), /* ddiv */
509 /* Instruction costs on PPC750 and PPC7400 processors. */
511 struct processor_costs ppc750_cost
= {
512 COSTS_N_INSNS (5), /* mulsi */
513 COSTS_N_INSNS (3), /* mulsi_const */
514 COSTS_N_INSNS (2), /* mulsi_const9 */
515 COSTS_N_INSNS (5), /* muldi */
516 COSTS_N_INSNS (17), /* divsi */
517 COSTS_N_INSNS (17), /* divdi */
518 COSTS_N_INSNS (3), /* fp */
519 COSTS_N_INSNS (3), /* dmul */
520 COSTS_N_INSNS (17), /* sdiv */
521 COSTS_N_INSNS (31), /* ddiv */
524 /* Instruction costs on PPC7450 processors. */
526 struct processor_costs ppc7450_cost
= {
527 COSTS_N_INSNS (4), /* mulsi */
528 COSTS_N_INSNS (3), /* mulsi_const */
529 COSTS_N_INSNS (3), /* mulsi_const9 */
530 COSTS_N_INSNS (4), /* muldi */
531 COSTS_N_INSNS (23), /* divsi */
532 COSTS_N_INSNS (23), /* divdi */
533 COSTS_N_INSNS (5), /* fp */
534 COSTS_N_INSNS (5), /* dmul */
535 COSTS_N_INSNS (21), /* sdiv */
536 COSTS_N_INSNS (35), /* ddiv */
539 /* Instruction costs on PPC8540 processors. */
541 struct processor_costs ppc8540_cost
= {
542 COSTS_N_INSNS (4), /* mulsi */
543 COSTS_N_INSNS (4), /* mulsi_const */
544 COSTS_N_INSNS (4), /* mulsi_const9 */
545 COSTS_N_INSNS (4), /* muldi */
546 COSTS_N_INSNS (19), /* divsi */
547 COSTS_N_INSNS (19), /* divdi */
548 COSTS_N_INSNS (4), /* fp */
549 COSTS_N_INSNS (4), /* dmul */
550 COSTS_N_INSNS (29), /* sdiv */
551 COSTS_N_INSNS (29), /* ddiv */
554 /* Instruction costs on POWER4 and POWER5 processors. */
556 struct processor_costs power4_cost
= {
557 COSTS_N_INSNS (3), /* mulsi */
558 COSTS_N_INSNS (2), /* mulsi_const */
559 COSTS_N_INSNS (2), /* mulsi_const9 */
560 COSTS_N_INSNS (4), /* muldi */
561 COSTS_N_INSNS (18), /* divsi */
562 COSTS_N_INSNS (34), /* divdi */
563 COSTS_N_INSNS (3), /* fp */
564 COSTS_N_INSNS (3), /* dmul */
565 COSTS_N_INSNS (17), /* sdiv */
566 COSTS_N_INSNS (17), /* ddiv */
570 static bool rs6000_function_ok_for_sibcall (tree
, tree
);
571 static const char *rs6000_invalid_within_doloop (rtx
);
572 static rtx
rs6000_generate_compare (enum rtx_code
);
573 static void rs6000_maybe_dead (rtx
);
574 static void rs6000_emit_stack_tie (void);
575 static void rs6000_frame_related (rtx
, rtx
, HOST_WIDE_INT
, rtx
, rtx
);
576 static rtx
spe_synthesize_frame_save (rtx
);
577 static bool spe_func_has_64bit_regs_p (void);
578 static void emit_frame_save (rtx
, rtx
, enum machine_mode
, unsigned int,
580 static rtx
gen_frame_mem_offset (enum machine_mode
, rtx
, int);
581 static void rs6000_emit_allocate_stack (HOST_WIDE_INT
, int);
582 static unsigned rs6000_hash_constant (rtx
);
583 static unsigned toc_hash_function (const void *);
584 static int toc_hash_eq (const void *, const void *);
585 static int constant_pool_expr_1 (rtx
, int *, int *);
586 static bool constant_pool_expr_p (rtx
);
587 static bool legitimate_small_data_p (enum machine_mode
, rtx
);
588 static bool legitimate_indexed_address_p (rtx
, int);
589 static bool legitimate_lo_sum_address_p (enum machine_mode
, rtx
, int);
590 static struct machine_function
* rs6000_init_machine_status (void);
591 static bool rs6000_assemble_integer (rtx
, unsigned int, int);
592 static bool no_global_regs_above (int);
593 #ifdef HAVE_GAS_HIDDEN
594 static void rs6000_assemble_visibility (tree
, int);
596 static int rs6000_ra_ever_killed (void);
597 static tree
rs6000_handle_longcall_attribute (tree
*, tree
, tree
, int, bool *);
598 static tree
rs6000_handle_altivec_attribute (tree
*, tree
, tree
, int, bool *);
599 static void rs6000_eliminate_indexed_memrefs (rtx operands
[2]);
600 static const char *rs6000_mangle_fundamental_type (tree
);
601 extern const struct attribute_spec rs6000_attribute_table
[];
602 static void rs6000_set_default_type_attributes (tree
);
603 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT
);
604 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT
);
605 static void rs6000_output_mi_thunk (FILE *, tree
, HOST_WIDE_INT
, HOST_WIDE_INT
,
607 static rtx
rs6000_emit_set_long_const (rtx
, HOST_WIDE_INT
, HOST_WIDE_INT
);
608 static bool rs6000_return_in_memory (tree
, tree
);
609 static void rs6000_file_start (void);
611 static unsigned int rs6000_elf_section_type_flags (tree
, const char *, int);
612 static void rs6000_elf_asm_out_constructor (rtx
, int);
613 static void rs6000_elf_asm_out_destructor (rtx
, int);
614 static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED
;
615 static void rs6000_elf_select_section (tree
, int, unsigned HOST_WIDE_INT
);
616 static void rs6000_elf_unique_section (tree
, int);
617 static void rs6000_elf_select_rtx_section (enum machine_mode
, rtx
,
618 unsigned HOST_WIDE_INT
);
619 static void rs6000_elf_encode_section_info (tree
, rtx
, int)
621 static bool rs6000_elf_in_small_data_p (tree
);
624 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
625 static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree
);
626 static void rs6000_xcoff_select_section (tree
, int, unsigned HOST_WIDE_INT
);
627 static void rs6000_xcoff_unique_section (tree
, int);
628 static void rs6000_xcoff_select_rtx_section (enum machine_mode
, rtx
,
629 unsigned HOST_WIDE_INT
);
630 static const char * rs6000_xcoff_strip_name_encoding (const char *);
631 static unsigned int rs6000_xcoff_section_type_flags (tree
, const char *, int);
632 static void rs6000_xcoff_file_start (void);
633 static void rs6000_xcoff_file_end (void);
635 static int rs6000_variable_issue (FILE *, int, rtx
, int);
636 static bool rs6000_rtx_costs (rtx
, int, int, int *);
637 static int rs6000_adjust_cost (rtx
, rtx
, rtx
, int);
638 static bool is_microcoded_insn (rtx
);
639 static int is_dispatch_slot_restricted (rtx
);
640 static bool is_cracked_insn (rtx
);
641 static bool is_branch_slot_insn (rtx
);
642 static int rs6000_adjust_priority (rtx
, int);
643 static int rs6000_issue_rate (void);
644 static bool rs6000_is_costly_dependence (rtx
, rtx
, rtx
, int, int);
645 static rtx
get_next_active_insn (rtx
, rtx
);
646 static bool insn_terminates_group_p (rtx
, enum group_termination
);
647 static bool is_costly_group (rtx
*, rtx
);
648 static int force_new_group (int, FILE *, rtx
*, rtx
, bool *, int, int *);
649 static int redefine_groups (FILE *, int, rtx
, rtx
);
650 static int pad_groups (FILE *, int, rtx
, rtx
);
651 static void rs6000_sched_finish (FILE *, int);
652 static int rs6000_use_sched_lookahead (void);
653 static tree
rs6000_builtin_mask_for_load (void);
655 static void def_builtin (int, const char *, tree
, int);
656 static void rs6000_init_builtins (void);
657 static rtx
rs6000_expand_unop_builtin (enum insn_code
, tree
, rtx
);
658 static rtx
rs6000_expand_binop_builtin (enum insn_code
, tree
, rtx
);
659 static rtx
rs6000_expand_ternop_builtin (enum insn_code
, tree
, rtx
);
660 static rtx
rs6000_expand_builtin (tree
, rtx
, rtx
, enum machine_mode
, int);
661 static void altivec_init_builtins (void);
662 static void rs6000_common_init_builtins (void);
663 static void rs6000_init_libfuncs (void);
665 static void enable_mask_for_builtins (struct builtin_description
*, int,
666 enum rs6000_builtins
,
667 enum rs6000_builtins
);
668 static tree
build_opaque_vector_type (tree
, int);
669 static void spe_init_builtins (void);
670 static rtx
spe_expand_builtin (tree
, rtx
, bool *);
671 static rtx
spe_expand_stv_builtin (enum insn_code
, tree
);
672 static rtx
spe_expand_predicate_builtin (enum insn_code
, tree
, rtx
);
673 static rtx
spe_expand_evsel_builtin (enum insn_code
, tree
, rtx
);
674 static int rs6000_emit_int_cmove (rtx
, rtx
, rtx
, rtx
);
675 static rs6000_stack_t
*rs6000_stack_info (void);
676 static void debug_stack_info (rs6000_stack_t
*);
678 static rtx
altivec_expand_builtin (tree
, rtx
, bool *);
679 static rtx
altivec_expand_ld_builtin (tree
, rtx
, bool *);
680 static rtx
altivec_expand_st_builtin (tree
, rtx
, bool *);
681 static rtx
altivec_expand_dst_builtin (tree
, rtx
, bool *);
682 static rtx
altivec_expand_abs_builtin (enum insn_code
, tree
, rtx
);
683 static rtx
altivec_expand_predicate_builtin (enum insn_code
,
684 const char *, tree
, rtx
);
685 static rtx
altivec_expand_lv_builtin (enum insn_code
, tree
, rtx
);
686 static rtx
altivec_expand_stv_builtin (enum insn_code
, tree
);
687 static bool rs6000_handle_option (size_t, const char *, int);
688 static void rs6000_parse_tls_size_option (void);
689 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
690 static int first_altivec_reg_to_save (void);
691 static unsigned int compute_vrsave_mask (void);
692 static void compute_save_world_info (rs6000_stack_t
*info_ptr
);
693 static void is_altivec_return_reg (rtx
, void *);
694 static rtx
generate_set_vrsave (rtx
, rs6000_stack_t
*, int);
695 int easy_vector_constant (rtx
, enum machine_mode
);
696 static bool rs6000_is_opaque_type (tree
);
697 static rtx
rs6000_dwarf_register_span (rtx
);
698 static rtx
rs6000_legitimize_tls_address (rtx
, enum tls_model
);
699 static void rs6000_output_dwarf_dtprel (FILE *, int, rtx
) ATTRIBUTE_UNUSED
;
700 static rtx
rs6000_tls_get_addr (void);
701 static rtx
rs6000_got_sym (void);
702 static int rs6000_tls_symbol_ref_1 (rtx
*, void *);
703 static const char *rs6000_get_some_local_dynamic_name (void);
704 static int rs6000_get_some_local_dynamic_name_1 (rtx
*, void *);
705 static rtx
rs6000_complex_function_value (enum machine_mode
);
706 static rtx
rs6000_spe_function_arg (CUMULATIVE_ARGS
*,
707 enum machine_mode
, tree
);
708 static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS
*,
710 static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS
*,
711 tree
, HOST_WIDE_INT
);
712 static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS
*,
715 static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS
*,
718 static rtx
rs6000_darwin64_record_arg (CUMULATIVE_ARGS
*, tree
, int, bool);
719 static rtx
rs6000_mixed_function_arg (enum machine_mode
, tree
, int);
720 static void rs6000_move_block_from_reg (int regno
, rtx x
, int nregs
);
721 static void setup_incoming_varargs (CUMULATIVE_ARGS
*,
722 enum machine_mode
, tree
,
724 static bool rs6000_pass_by_reference (CUMULATIVE_ARGS
*, enum machine_mode
,
726 static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS
*, enum machine_mode
,
728 static const char *invalid_arg_for_unprototyped_fn (tree
, tree
, tree
);
730 static void macho_branch_islands (void);
731 static void add_compiler_branch_island (tree
, tree
, int);
732 static int no_previous_def (tree function_name
);
733 static tree
get_prev_label (tree function_name
);
734 static void rs6000_darwin_file_start (void);
737 static tree
rs6000_build_builtin_va_list (void);
738 static tree
rs6000_gimplify_va_arg (tree
, tree
, tree
*, tree
*);
739 static bool rs6000_must_pass_in_stack (enum machine_mode
, tree
);
740 static bool rs6000_vector_mode_supported_p (enum machine_mode
);
741 static int get_vec_cmp_insn (enum rtx_code
, enum machine_mode
,
743 static rtx
rs6000_emit_vector_compare (enum rtx_code
, rtx
, rtx
,
745 static int get_vsel_insn (enum machine_mode
);
746 static void rs6000_emit_vector_select (rtx
, rtx
, rtx
, rtx
);
747 static tree
rs6000_stack_protect_fail (void);
/* Sentinel meaning "no insn available"; -1 never collides with a valid
   insn cost/index value.  */
749 const int INSN_NOT_AVAILABLE
= -1;
750 static enum machine_mode
rs6000_eh_return_filter_mode (void);
752 /* Hash table stuff for keeping track of TOC entries. */
754 struct toc_hash_struct
GTY(())
756 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
757 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
759 enum machine_mode key_mode
;
763 static GTY ((param_is (struct toc_hash_struct
))) htab_t toc_hash_table
;
765 /* Default register names. */
766 char rs6000_reg_names
[][8] =
768 "0", "1", "2", "3", "4", "5", "6", "7",
769 "8", "9", "10", "11", "12", "13", "14", "15",
770 "16", "17", "18", "19", "20", "21", "22", "23",
771 "24", "25", "26", "27", "28", "29", "30", "31",
772 "0", "1", "2", "3", "4", "5", "6", "7",
773 "8", "9", "10", "11", "12", "13", "14", "15",
774 "16", "17", "18", "19", "20", "21", "22", "23",
775 "24", "25", "26", "27", "28", "29", "30", "31",
776 "mq", "lr", "ctr","ap",
777 "0", "1", "2", "3", "4", "5", "6", "7",
779 /* AltiVec registers. */
780 "0", "1", "2", "3", "4", "5", "6", "7",
781 "8", "9", "10", "11", "12", "13", "14", "15",
782 "16", "17", "18", "19", "20", "21", "22", "23",
783 "24", "25", "26", "27", "28", "29", "30", "31",
786 "spe_acc", "spefscr",
787 /* Soft frame pointer. */
791 #ifdef TARGET_REGNAMES
792 static const char alt_reg_names
[][8] =
794 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
795 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
796 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
797 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
798 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
799 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
800 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
801 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
802 "mq", "lr", "ctr", "ap",
803 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
805 /* AltiVec registers. */
806 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
807 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
808 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
809 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
812 "spe_acc", "spefscr",
813 /* Soft frame pointer. */
818 #ifndef MASK_STRICT_ALIGN
819 #define MASK_STRICT_ALIGN 0
821 #ifndef TARGET_PROFILE_KERNEL
822 #define TARGET_PROFILE_KERNEL 0
825 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
826 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
828 /* Initialize the GCC target structure. */
829 #undef TARGET_ATTRIBUTE_TABLE
830 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
831 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
832 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
834 #undef TARGET_ASM_ALIGNED_DI_OP
835 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
837 /* Default unaligned ops are only provided for ELF. Find the ops needed
838 for non-ELF systems. */
839 #ifndef OBJECT_FORMAT_ELF
841 /* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   64-bit machines.  */
843 #undef TARGET_ASM_UNALIGNED_HI_OP
844 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
845 #undef TARGET_ASM_UNALIGNED_SI_OP
846 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
847 #undef TARGET_ASM_UNALIGNED_DI_OP
848 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
851 #undef TARGET_ASM_UNALIGNED_HI_OP
852 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
853 #undef TARGET_ASM_UNALIGNED_SI_OP
854 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
855 #undef TARGET_ASM_UNALIGNED_DI_OP
856 #define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
857 #undef TARGET_ASM_ALIGNED_DI_OP
858 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
862 /* This hook deals with fixups for relocatable code and DI-mode objects
   in 64-bit code.  */
864 #undef TARGET_ASM_INTEGER
865 #define TARGET_ASM_INTEGER rs6000_assemble_integer
867 #ifdef HAVE_GAS_HIDDEN
868 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
869 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
872 #undef TARGET_HAVE_TLS
873 #define TARGET_HAVE_TLS HAVE_AS_TLS
875 #undef TARGET_CANNOT_FORCE_CONST_MEM
876 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
878 #undef TARGET_ASM_FUNCTION_PROLOGUE
879 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
880 #undef TARGET_ASM_FUNCTION_EPILOGUE
881 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
883 #undef TARGET_SCHED_VARIABLE_ISSUE
884 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
886 #undef TARGET_SCHED_ISSUE_RATE
887 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
888 #undef TARGET_SCHED_ADJUST_COST
889 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
890 #undef TARGET_SCHED_ADJUST_PRIORITY
891 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
892 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
893 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
894 #undef TARGET_SCHED_FINISH
895 #define TARGET_SCHED_FINISH rs6000_sched_finish
897 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
898 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
900 #undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
901 #define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
903 #undef TARGET_INIT_BUILTINS
904 #define TARGET_INIT_BUILTINS rs6000_init_builtins
906 #undef TARGET_EXPAND_BUILTIN
907 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
909 #undef TARGET_MANGLE_FUNDAMENTAL_TYPE
910 #define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
912 #undef TARGET_INIT_LIBFUNCS
913 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
916 #undef TARGET_BINDS_LOCAL_P
917 #define TARGET_BINDS_LOCAL_P darwin_binds_local_p
920 #undef TARGET_ASM_OUTPUT_MI_THUNK
921 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
923 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
924 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
926 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
927 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
929 #undef TARGET_INVALID_WITHIN_DOLOOP
930 #define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
932 #undef TARGET_RTX_COSTS
933 #define TARGET_RTX_COSTS rs6000_rtx_costs
934 #undef TARGET_ADDRESS_COST
935 #define TARGET_ADDRESS_COST hook_int_rtx_0
937 #undef TARGET_VECTOR_OPAQUE_P
938 #define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type
940 #undef TARGET_DWARF_REGISTER_SPAN
941 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
943 /* On rs6000, function arguments are promoted, as are function return
945 #undef TARGET_PROMOTE_FUNCTION_ARGS
946 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
947 #undef TARGET_PROMOTE_FUNCTION_RETURN
948 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
950 #undef TARGET_RETURN_IN_MEMORY
951 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
953 #undef TARGET_SETUP_INCOMING_VARARGS
954 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
956 /* Always strict argument naming on rs6000. */
957 #undef TARGET_STRICT_ARGUMENT_NAMING
958 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
959 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
960 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
961 #undef TARGET_SPLIT_COMPLEX_ARG
962 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
963 #undef TARGET_MUST_PASS_IN_STACK
964 #define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
965 #undef TARGET_PASS_BY_REFERENCE
966 #define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
967 #undef TARGET_ARG_PARTIAL_BYTES
968 #define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
970 #undef TARGET_BUILD_BUILTIN_VA_LIST
971 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
973 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
974 #define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
976 #undef TARGET_EH_RETURN_FILTER_MODE
977 #define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
979 #undef TARGET_VECTOR_MODE_SUPPORTED_P
980 #define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
982 #undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
983 #define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
985 #undef TARGET_HANDLE_OPTION
986 #define TARGET_HANDLE_OPTION rs6000_handle_option
988 #undef TARGET_DEFAULT_TARGET_FLAGS
989 #define TARGET_DEFAULT_TARGET_FLAGS \
990 (TARGET_DEFAULT | MASK_SCHED_PROLOG)
992 #undef TARGET_STACK_PROTECT_FAIL
993 #define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail
995 /* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
996 The PowerPC architecture requires only weak consistency among
997 processors--that is, memory accesses between processors need not be
998 sequentially consistent and memory accesses among processors can occur
999 in any order. The ability to order memory accesses weakly provides
1000 opportunities for more efficient use of the system bus. Unless a
1001 dependency exists, the 604e allows read operations to precede store
1003 #undef TARGET_RELAXED_ORDERING
1004 #define TARGET_RELAXED_ORDERING true
1007 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1008 #define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
/* Instantiate the GCC target hook vector from the TARGET_* macros
   redefined above; this is the single definition of targetm for the
   rs6000 back end.  */
struct gcc_target targetm = TARGET_INITIALIZER;
1014 /* Value is 1 if hard register REGNO can hold a value of machine-mode
1017 rs6000_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
1019 /* The GPRs can hold any mode, but values bigger than one register
1020 cannot go past R31. */
1021 if (INT_REGNO_P (regno
))
1022 return INT_REGNO_P (regno
+ HARD_REGNO_NREGS (regno
, mode
) - 1);
1024 /* The float registers can only hold floating modes and DImode. */
1025 if (FP_REGNO_P (regno
))
1027 (GET_MODE_CLASS (mode
) == MODE_FLOAT
1028 && FP_REGNO_P (regno
+ HARD_REGNO_NREGS (regno
, mode
) - 1))
1029 || (GET_MODE_CLASS (mode
) == MODE_INT
1030 && GET_MODE_SIZE (mode
) == UNITS_PER_FP_WORD
);
1032 /* The CR register can only hold CC modes. */
1033 if (CR_REGNO_P (regno
))
1034 return GET_MODE_CLASS (mode
) == MODE_CC
;
1036 if (XER_REGNO_P (regno
))
1037 return mode
== PSImode
;
1039 /* AltiVec only in AldyVec registers. */
1040 if (ALTIVEC_REGNO_P (regno
))
1041 return ALTIVEC_VECTOR_MODE (mode
);
1043 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1044 if (SPE_SIMD_REGNO_P (regno
) && TARGET_SPE
&& SPE_VECTOR_MODE (mode
))
1047 /* We cannot put TImode anywhere except general register and it must be
1048 able to fit within the register set. */
1050 return GET_MODE_SIZE (mode
) <= UNITS_PER_WORD
;
1053 /* Initialize rs6000_hard_regno_mode_ok_p table. */
1055 rs6000_init_hard_regno_mode_ok (void)
1059 for (r
= 0; r
< FIRST_PSEUDO_REGISTER
; ++r
)
1060 for (m
= 0; m
< NUM_MACHINE_MODES
; ++m
)
1061 if (rs6000_hard_regno_mode_ok (r
, m
))
1062 rs6000_hard_regno_mode_ok_p
[m
][r
] = true;
1065 /* If not otherwise specified by a target, make 'long double' equivalent to
1068 #ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1069 #define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1072 /* Override command line options. Mostly we process the processor
1073 type and sometimes adjust other TARGET_ options. */
1076 rs6000_override_options (const char *default_cpu
)
1079 struct rs6000_cpu_select
*ptr
;
1082 /* Simplifications for entries below. */
1085 POWERPC_BASE_MASK
= MASK_POWERPC
| MASK_NEW_MNEMONICS
,
1086 POWERPC_7400_MASK
= POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_ALTIVEC
1089 /* This table occasionally claims that a processor does not support
1090 a particular feature even though it does, but the feature is slower
1091 than the alternative. Thus, it shouldn't be relied on as a
1092 complete description of the processor's support.
1094 Please keep this list in order, and don't forget to update the
1095 documentation in invoke.texi when adding a new processor or
1099 const char *const name
; /* Canonical processor name. */
1100 const enum processor_type processor
; /* Processor type enum value. */
1101 const int target_enable
; /* Target flags to enable. */
1102 } const processor_target_table
[]
1103 = {{"401", PROCESSOR_PPC403
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1104 {"403", PROCESSOR_PPC403
,
1105 POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
| MASK_STRICT_ALIGN
},
1106 {"405", PROCESSOR_PPC405
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1107 {"405fp", PROCESSOR_PPC405
, POWERPC_BASE_MASK
},
1108 {"440", PROCESSOR_PPC440
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1109 {"440fp", PROCESSOR_PPC440
, POWERPC_BASE_MASK
},
1110 {"505", PROCESSOR_MPCCORE
, POWERPC_BASE_MASK
},
1111 {"601", PROCESSOR_PPC601
,
1112 MASK_POWER
| POWERPC_BASE_MASK
| MASK_MULTIPLE
| MASK_STRING
},
1113 {"602", PROCESSOR_PPC603
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1114 {"603", PROCESSOR_PPC603
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1115 {"603e", PROCESSOR_PPC603
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1116 {"604", PROCESSOR_PPC604
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1117 {"604e", PROCESSOR_PPC604e
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1118 {"620", PROCESSOR_PPC620
,
1119 POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_POWERPC64
},
1120 {"630", PROCESSOR_PPC630
,
1121 POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_POWERPC64
},
1122 {"740", PROCESSOR_PPC750
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1123 {"7400", PROCESSOR_PPC7400
, POWERPC_7400_MASK
},
1124 {"7450", PROCESSOR_PPC7450
, POWERPC_7400_MASK
},
1125 {"750", PROCESSOR_PPC750
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1126 {"801", PROCESSOR_MPCCORE
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1127 {"821", PROCESSOR_MPCCORE
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1128 {"823", PROCESSOR_MPCCORE
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1129 {"8540", PROCESSOR_PPC8540
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1130 /* 8548 has a dummy entry for now. */
1131 {"8548", PROCESSOR_PPC8540
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1132 {"860", PROCESSOR_MPCCORE
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1133 {"970", PROCESSOR_POWER4
,
1134 POWERPC_7400_MASK
| MASK_PPC_GPOPT
| MASK_MFCRF
| MASK_POWERPC64
},
1135 {"common", PROCESSOR_COMMON
, MASK_NEW_MNEMONICS
},
1136 {"ec603e", PROCESSOR_PPC603
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1137 {"G3", PROCESSOR_PPC750
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1138 {"G4", PROCESSOR_PPC7450
, POWERPC_7400_MASK
},
1139 {"G5", PROCESSOR_POWER4
,
1140 POWERPC_7400_MASK
| MASK_PPC_GPOPT
| MASK_MFCRF
| MASK_POWERPC64
},
1141 {"power", PROCESSOR_POWER
, MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
},
1142 {"power2", PROCESSOR_POWER
,
1143 MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
},
1144 {"power3", PROCESSOR_PPC630
,
1145 POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_POWERPC64
},
1146 {"power4", PROCESSOR_POWER4
,
1147 POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_MFCRF
| MASK_POWERPC64
},
1148 {"power5", PROCESSOR_POWER5
,
1149 POWERPC_BASE_MASK
| MASK_POWERPC64
| MASK_PPC_GFXOPT
1150 | MASK_MFCRF
| MASK_POPCNTB
},
1151 {"powerpc", PROCESSOR_POWERPC
, POWERPC_BASE_MASK
},
1152 {"powerpc64", PROCESSOR_POWERPC64
,
1153 POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_POWERPC64
},
1154 {"rios", PROCESSOR_RIOS1
, MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
},
1155 {"rios1", PROCESSOR_RIOS1
, MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
},
1156 {"rios2", PROCESSOR_RIOS2
,
1157 MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
},
1158 {"rsc", PROCESSOR_PPC601
, MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
},
1159 {"rsc1", PROCESSOR_PPC601
, MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
},
1160 {"rs64", PROCESSOR_RS64A
,
1161 POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_POWERPC64
}
1164 const size_t ptt_size
= ARRAY_SIZE (processor_target_table
);
1166 /* Some OSs don't support saving the high part of 64-bit registers on
1167 context switch. Other OSs don't support saving Altivec registers.
1168 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1169 settings; if the user wants either, the user must explicitly specify
1170 them and we won't interfere with the user's specification. */
1173 POWER_MASKS
= MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
,
1174 POWERPC_MASKS
= (POWERPC_BASE_MASK
| MASK_PPC_GPOPT
1175 | MASK_PPC_GFXOPT
| MASK_POWERPC64
| MASK_ALTIVEC
1179 rs6000_init_hard_regno_mode_ok ();
1181 set_masks
= POWER_MASKS
| POWERPC_MASKS
| MASK_SOFT_FLOAT
;
1182 #ifdef OS_MISSING_POWERPC64
1183 if (OS_MISSING_POWERPC64
)
1184 set_masks
&= ~MASK_POWERPC64
;
1186 #ifdef OS_MISSING_ALTIVEC
1187 if (OS_MISSING_ALTIVEC
)
1188 set_masks
&= ~MASK_ALTIVEC
;
1191 /* Don't override by the processor default if given explicitly. */
1192 set_masks
&= ~target_flags_explicit
;
1194 /* Identify the processor type. */
1195 rs6000_select
[0].string
= default_cpu
;
1196 rs6000_cpu
= TARGET_POWERPC64
? PROCESSOR_DEFAULT64
: PROCESSOR_DEFAULT
;
1198 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
1200 ptr
= &rs6000_select
[i
];
1201 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
1203 for (j
= 0; j
< ptt_size
; j
++)
1204 if (! strcmp (ptr
->string
, processor_target_table
[j
].name
))
1206 if (ptr
->set_tune_p
)
1207 rs6000_cpu
= processor_target_table
[j
].processor
;
1209 if (ptr
->set_arch_p
)
1211 target_flags
&= ~set_masks
;
1212 target_flags
|= (processor_target_table
[j
].target_enable
1219 error ("bad value (%s) for %s switch", ptr
->string
, ptr
->name
);
1226 /* If we are optimizing big endian systems for space, use the load/store
1227 multiple and string instructions. */
1228 if (BYTES_BIG_ENDIAN
&& optimize_size
)
1229 target_flags
|= ~target_flags_explicit
& (MASK_MULTIPLE
| MASK_STRING
);
1231 /* Don't allow -mmultiple or -mstring on little endian systems
1232 unless the cpu is a 750, because the hardware doesn't support the
1233 instructions used in little endian mode, and causes an alignment
1234 trap. The 750 does not cause an alignment trap (except when the
1235 target is unaligned). */
1237 if (!BYTES_BIG_ENDIAN
&& rs6000_cpu
!= PROCESSOR_PPC750
)
1239 if (TARGET_MULTIPLE
)
1241 target_flags
&= ~MASK_MULTIPLE
;
1242 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
1243 warning (0, "-mmultiple is not supported on little endian systems");
1248 target_flags
&= ~MASK_STRING
;
1249 if ((target_flags_explicit
& MASK_STRING
) != 0)
1250 warning (0, "-mstring is not supported on little endian systems");
1254 /* Set debug flags */
1255 if (rs6000_debug_name
)
1257 if (! strcmp (rs6000_debug_name
, "all"))
1258 rs6000_debug_stack
= rs6000_debug_arg
= 1;
1259 else if (! strcmp (rs6000_debug_name
, "stack"))
1260 rs6000_debug_stack
= 1;
1261 else if (! strcmp (rs6000_debug_name
, "arg"))
1262 rs6000_debug_arg
= 1;
1264 error ("unknown -mdebug-%s switch", rs6000_debug_name
);
1267 if (rs6000_traceback_name
)
1269 if (! strncmp (rs6000_traceback_name
, "full", 4))
1270 rs6000_traceback
= traceback_full
;
1271 else if (! strncmp (rs6000_traceback_name
, "part", 4))
1272 rs6000_traceback
= traceback_part
;
1273 else if (! strncmp (rs6000_traceback_name
, "no", 2))
1274 rs6000_traceback
= traceback_none
;
1276 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
1277 rs6000_traceback_name
);
1280 if (!rs6000_explicit_options
.long_double
)
1281 rs6000_long_double_type_size
= RS6000_DEFAULT_LONG_DOUBLE_SIZE
;
1283 /* Set Altivec ABI as default for powerpc64 linux. */
1284 if (TARGET_ELF
&& TARGET_64BIT
)
1286 rs6000_altivec_abi
= 1;
1287 TARGET_ALTIVEC_VRSAVE
= 1;
1290 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1291 if (DEFAULT_ABI
== ABI_DARWIN
&& TARGET_64BIT
)
1293 rs6000_darwin64_abi
= 1;
1295 darwin_one_byte_bool
= 1;
1297 /* Default to natural alignment, for better performance. */
1298 rs6000_alignment_flags
= MASK_ALIGN_NATURAL
;
1301 /* Handle -mtls-size option. */
1302 rs6000_parse_tls_size_option ();
1304 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1305 SUBTARGET_OVERRIDE_OPTIONS
;
1307 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1308 SUBSUBTARGET_OVERRIDE_OPTIONS
;
1310 #ifdef SUB3TARGET_OVERRIDE_OPTIONS
1311 SUB3TARGET_OVERRIDE_OPTIONS
;
1317 error ("AltiVec and E500 instructions cannot coexist");
1319 /* The e500 does not have string instructions, and we set
1320 MASK_STRING above when optimizing for size. */
1321 if ((target_flags
& MASK_STRING
) != 0)
1322 target_flags
= target_flags
& ~MASK_STRING
;
1324 else if (rs6000_select
[1].string
!= NULL
)
1326 /* For the powerpc-eabispe configuration, we set all these by
1327 default, so let's unset them if we manually set another
1328 CPU that is not the E500. */
1329 if (!rs6000_explicit_options
.abi
)
1331 if (!rs6000_explicit_options
.spe
)
1333 if (!rs6000_explicit_options
.float_gprs
)
1334 rs6000_float_gprs
= 0;
1335 if (!rs6000_explicit_options
.isel
)
1337 if (!rs6000_explicit_options
.long_double
)
1338 rs6000_long_double_type_size
= RS6000_DEFAULT_LONG_DOUBLE_SIZE
;
1341 rs6000_always_hint
= (rs6000_cpu
!= PROCESSOR_POWER4
1342 && rs6000_cpu
!= PROCESSOR_POWER5
);
1343 rs6000_sched_groups
= (rs6000_cpu
== PROCESSOR_POWER4
1344 || rs6000_cpu
== PROCESSOR_POWER5
);
1346 rs6000_sched_restricted_insns_priority
1347 = (rs6000_sched_groups
? 1 : 0);
1349 /* Handle -msched-costly-dep option. */
1350 rs6000_sched_costly_dep
1351 = (rs6000_sched_groups
? store_to_load_dep_costly
: no_dep_costly
);
1353 if (rs6000_sched_costly_dep_str
)
1355 if (! strcmp (rs6000_sched_costly_dep_str
, "no"))
1356 rs6000_sched_costly_dep
= no_dep_costly
;
1357 else if (! strcmp (rs6000_sched_costly_dep_str
, "all"))
1358 rs6000_sched_costly_dep
= all_deps_costly
;
1359 else if (! strcmp (rs6000_sched_costly_dep_str
, "true_store_to_load"))
1360 rs6000_sched_costly_dep
= true_store_to_load_dep_costly
;
1361 else if (! strcmp (rs6000_sched_costly_dep_str
, "store_to_load"))
1362 rs6000_sched_costly_dep
= store_to_load_dep_costly
;
1364 rs6000_sched_costly_dep
= atoi (rs6000_sched_costly_dep_str
);
1367 /* Handle -minsert-sched-nops option. */
1368 rs6000_sched_insert_nops
1369 = (rs6000_sched_groups
? sched_finish_regroup_exact
: sched_finish_none
);
1371 if (rs6000_sched_insert_nops_str
)
1373 if (! strcmp (rs6000_sched_insert_nops_str
, "no"))
1374 rs6000_sched_insert_nops
= sched_finish_none
;
1375 else if (! strcmp (rs6000_sched_insert_nops_str
, "pad"))
1376 rs6000_sched_insert_nops
= sched_finish_pad_groups
;
1377 else if (! strcmp (rs6000_sched_insert_nops_str
, "regroup_exact"))
1378 rs6000_sched_insert_nops
= sched_finish_regroup_exact
;
1380 rs6000_sched_insert_nops
= atoi (rs6000_sched_insert_nops_str
);
1383 #ifdef TARGET_REGNAMES
1384 /* If the user desires alternate register names, copy in the
1385 alternate names now. */
1386 if (TARGET_REGNAMES
)
1387 memcpy (rs6000_reg_names
, alt_reg_names
, sizeof (rs6000_reg_names
));
1390 /* Set aix_struct_return last, after the ABI is determined.
1391 If -maix-struct-return or -msvr4-struct-return was explicitly
1392 used, don't override with the ABI default. */
1393 if (!rs6000_explicit_options
.aix_struct_ret
)
1394 aix_struct_return
= (DEFAULT_ABI
!= ABI_V4
|| DRAFT_V4_STRUCT_RET
);
1396 if (TARGET_LONG_DOUBLE_128
1397 && (DEFAULT_ABI
== ABI_AIX
|| DEFAULT_ABI
== ABI_DARWIN
))
1398 REAL_MODE_FORMAT (TFmode
) = &ibm_extended_format
;
1400 /* Allocate an alias set for register saves & restores from stack. */
1401 rs6000_sr_alias_set
= new_alias_set ();
1404 ASM_GENERATE_INTERNAL_LABEL (toc_label_name
, "LCTOC", 1);
1406 /* We can only guarantee the availability of DI pseudo-ops when
1407 assembling for 64-bit targets. */
1410 targetm
.asm_out
.aligned_op
.di
= NULL
;
1411 targetm
.asm_out
.unaligned_op
.di
= NULL
;
1414 /* Set branch target alignment, if not optimizing for size. */
1417 if (rs6000_sched_groups
)
1419 if (align_functions
<= 0)
1420 align_functions
= 16;
1421 if (align_jumps
<= 0)
1423 if (align_loops
<= 0)
1426 if (align_jumps_max_skip
<= 0)
1427 align_jumps_max_skip
= 15;
1428 if (align_loops_max_skip
<= 0)
1429 align_loops_max_skip
= 15;
1432 /* Arrange to save and restore machine status around nested functions. */
1433 init_machine_status
= rs6000_init_machine_status
;
1435 /* We should always be splitting complex arguments, but we can't break
1436 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1437 if (DEFAULT_ABI
!= ABI_AIX
)
1438 targetm
.calls
.split_complex_arg
= NULL
;
1440 /* Initialize rs6000_cost with the appropriate target costs. */
1442 rs6000_cost
= TARGET_POWERPC64
? &size64_cost
: &size32_cost
;
1446 case PROCESSOR_RIOS1
:
1447 rs6000_cost
= &rios1_cost
;
1450 case PROCESSOR_RIOS2
:
1451 rs6000_cost
= &rios2_cost
;
1454 case PROCESSOR_RS64A
:
1455 rs6000_cost
= &rs64a_cost
;
1458 case PROCESSOR_MPCCORE
:
1459 rs6000_cost
= &mpccore_cost
;
1462 case PROCESSOR_PPC403
:
1463 rs6000_cost
= &ppc403_cost
;
1466 case PROCESSOR_PPC405
:
1467 rs6000_cost
= &ppc405_cost
;
1470 case PROCESSOR_PPC440
:
1471 rs6000_cost
= &ppc440_cost
;
1474 case PROCESSOR_PPC601
:
1475 rs6000_cost
= &ppc601_cost
;
1478 case PROCESSOR_PPC603
:
1479 rs6000_cost
= &ppc603_cost
;
1482 case PROCESSOR_PPC604
:
1483 rs6000_cost
= &ppc604_cost
;
1486 case PROCESSOR_PPC604e
:
1487 rs6000_cost
= &ppc604e_cost
;
1490 case PROCESSOR_PPC620
:
1491 rs6000_cost
= &ppc620_cost
;
1494 case PROCESSOR_PPC630
:
1495 rs6000_cost
= &ppc630_cost
;
1498 case PROCESSOR_PPC750
:
1499 case PROCESSOR_PPC7400
:
1500 rs6000_cost
= &ppc750_cost
;
1503 case PROCESSOR_PPC7450
:
1504 rs6000_cost
= &ppc7450_cost
;
1507 case PROCESSOR_PPC8540
:
1508 rs6000_cost
= &ppc8540_cost
;
1511 case PROCESSOR_POWER4
:
1512 case PROCESSOR_POWER5
:
1513 rs6000_cost
= &power4_cost
;
1521 /* Implement targetm.vectorize.builtin_mask_for_load. */
1523 rs6000_builtin_mask_for_load (void)
1526 return altivec_builtin_mask_for_load
;
/* Handle generic options of the form -mfoo=yes/no.
   NAME is the option name.
   VALUE is the option value.
   FLAG is the pointer to the flag where to store a 1 or 0, depending on
   whether the option value is 'yes' or 'no' respectively.  */
static void
rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
{
  /* A missing value means the option was not given; leave *FLAG alone.  */
  if (value == 0)
    return;
  else if (!strcmp (value, "yes"))
    *flag = 1;
  else if (!strcmp (value, "no"))
    *flag = 0;
  else
    error ("unknown -m%s= option specified: '%s'", name, value);
}
1549 /* Validate and record the size specified with the -mtls-size option. */
1552 rs6000_parse_tls_size_option (void)
1554 if (rs6000_tls_size_string
== 0)
1556 else if (strcmp (rs6000_tls_size_string
, "16") == 0)
1557 rs6000_tls_size
= 16;
1558 else if (strcmp (rs6000_tls_size_string
, "32") == 0)
1559 rs6000_tls_size
= 32;
1560 else if (strcmp (rs6000_tls_size_string
, "64") == 0)
1561 rs6000_tls_size
= 64;
1563 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string
);
1567 optimization_options (int level ATTRIBUTE_UNUSED
, int size ATTRIBUTE_UNUSED
)
1569 if (DEFAULT_ABI
== ABI_DARWIN
)
1570 /* The Darwin libraries never set errno, so we might as well
1571 avoid calling them when that's the only reason we would. */
1572 flag_errno_math
= 0;
1575 /* Implement TARGET_HANDLE_OPTION. */
1578 rs6000_handle_option (size_t code
, const char *arg
, int value
)
1583 target_flags
&= ~(MASK_POWER
| MASK_POWER2
1584 | MASK_MULTIPLE
| MASK_STRING
);
1585 target_flags_explicit
|= (MASK_POWER
| MASK_POWER2
1586 | MASK_MULTIPLE
| MASK_STRING
);
1588 case OPT_mno_powerpc
:
1589 target_flags
&= ~(MASK_POWERPC
| MASK_PPC_GPOPT
1590 | MASK_PPC_GFXOPT
| MASK_POWERPC64
);
1591 target_flags_explicit
|= (MASK_POWERPC
| MASK_PPC_GPOPT
1592 | MASK_PPC_GFXOPT
| MASK_POWERPC64
);
1595 target_flags
&= ~(MASK_MINIMAL_TOC
| MASK_NO_FP_IN_TOC
1596 | MASK_NO_SUM_IN_TOC
);
1597 target_flags_explicit
|= (MASK_MINIMAL_TOC
| MASK_NO_FP_IN_TOC
1598 | MASK_NO_SUM_IN_TOC
);
1599 #ifdef TARGET_USES_SYSV4_OPT
1600 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc, be
1601 just the same as -mminimal-toc. */
1602 target_flags
|= MASK_MINIMAL_TOC
;
1603 target_flags_explicit
|= MASK_MINIMAL_TOC
;
1607 #ifdef TARGET_USES_SYSV4_OPT
1609 /* Make -mtoc behave like -mminimal-toc. */
1610 target_flags
|= MASK_MINIMAL_TOC
;
1611 target_flags_explicit
|= MASK_MINIMAL_TOC
;
1615 #ifdef TARGET_USES_AIX64_OPT
1620 target_flags
|= MASK_POWERPC64
| MASK_POWERPC
| MASK_PPC_GFXOPT
;
1621 target_flags_explicit
|= MASK_POWERPC64
| MASK_POWERPC
1625 #ifdef TARGET_USES_AIX64_OPT
1630 target_flags
&= ~MASK_POWERPC64
;
1631 target_flags_explicit
|= MASK_POWERPC64
;
1634 case OPT_minsert_sched_nops_
:
1635 rs6000_sched_insert_nops_str
= arg
;
1638 case OPT_mminimal_toc
:
1641 target_flags
&= ~(MASK_NO_FP_IN_TOC
| MASK_NO_SUM_IN_TOC
);
1642 target_flags_explicit
|= (MASK_NO_FP_IN_TOC
| MASK_NO_SUM_IN_TOC
);
1649 target_flags
|= (MASK_MULTIPLE
| MASK_STRING
);
1650 target_flags_explicit
|= (MASK_MULTIPLE
| MASK_STRING
);
1657 target_flags
|= (MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
);
1658 target_flags_explicit
|= (MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
);
1662 case OPT_mpowerpc_gpopt
:
1663 case OPT_mpowerpc_gfxopt
:
1666 target_flags
|= MASK_POWERPC
;
1667 target_flags_explicit
|= MASK_POWERPC
;
1671 case OPT_maix_struct_return
:
1672 case OPT_msvr4_struct_return
:
1673 rs6000_explicit_options
.aix_struct_ret
= true;
1677 rs6000_parse_yes_no_option ("vrsave", arg
, &(TARGET_ALTIVEC_VRSAVE
));
1681 rs6000_explicit_options
.isel
= true;
1682 rs6000_parse_yes_no_option ("isel", arg
, &(rs6000_isel
));
1686 rs6000_explicit_options
.spe
= true;
1687 rs6000_parse_yes_no_option ("spe", arg
, &(rs6000_spe
));
1688 /* No SPE means 64-bit long doubles, even if an E500. */
1690 rs6000_long_double_type_size
= 64;
1694 rs6000_debug_name
= arg
;
1697 #ifdef TARGET_USES_SYSV4_OPT
1699 rs6000_abi_name
= arg
;
1703 rs6000_sdata_name
= arg
;
1706 case OPT_mtls_size_
:
1707 rs6000_tls_size_string
= arg
;
1710 case OPT_mrelocatable
:
1713 target_flags
|= MASK_MINIMAL_TOC
| MASK_NO_FP_IN_TOC
;
1714 target_flags_explicit
|= MASK_MINIMAL_TOC
| MASK_NO_FP_IN_TOC
;
1718 case OPT_mrelocatable_lib
:
1721 target_flags
|= MASK_RELOCATABLE
| MASK_MINIMAL_TOC
1722 | MASK_NO_FP_IN_TOC
;
1723 target_flags_explicit
|= MASK_RELOCATABLE
| MASK_MINIMAL_TOC
1724 | MASK_NO_FP_IN_TOC
;
1728 target_flags
&= ~MASK_RELOCATABLE
;
1729 target_flags_explicit
|= MASK_RELOCATABLE
;
1735 rs6000_explicit_options
.abi
= true;
1736 if (!strcmp (arg
, "altivec"))
1738 rs6000_altivec_abi
= 1;
1741 else if (! strcmp (arg
, "no-altivec"))
1742 rs6000_altivec_abi
= 0;
1743 else if (! strcmp (arg
, "spe"))
1746 rs6000_altivec_abi
= 0;
1747 if (!TARGET_SPE_ABI
)
1748 error ("not configured for ABI: '%s'", arg
);
1750 else if (! strcmp (arg
, "no-spe"))
1753 /* These are here for testing during development only, do not
1754 document in the manual please. */
1755 else if (! strcmp (arg
, "d64"))
1757 rs6000_darwin64_abi
= 1;
1758 warning (0, "Using darwin64 ABI");
1760 else if (! strcmp (arg
, "d32"))
1762 rs6000_darwin64_abi
= 0;
1763 warning (0, "Using old darwin ABI");
1768 error ("unknown ABI specified: '%s'", arg
);
1774 rs6000_select
[1].string
= arg
;
1778 rs6000_select
[2].string
= arg
;
1781 case OPT_mtraceback_
:
1782 rs6000_traceback_name
= arg
;
1785 case OPT_mfloat_gprs_
:
1786 rs6000_explicit_options
.float_gprs
= true;
1787 if (! strcmp (arg
, "yes") || ! strcmp (arg
, "single"))
1788 rs6000_float_gprs
= 1;
1789 else if (! strcmp (arg
, "double"))
1790 rs6000_float_gprs
= 2;
1791 else if (! strcmp (arg
, "no"))
1792 rs6000_float_gprs
= 0;
1795 error ("invalid option for -mfloat-gprs: '%s'", arg
);
1800 case OPT_mlong_double_
:
1801 rs6000_explicit_options
.long_double
= true;
1802 rs6000_long_double_type_size
= RS6000_DEFAULT_LONG_DOUBLE_SIZE
;
1803 if (value
!= 64 && value
!= 128)
1805 error ("Unknown switch -mlong-double-%s", arg
);
1806 rs6000_long_double_type_size
= RS6000_DEFAULT_LONG_DOUBLE_SIZE
;
1810 rs6000_long_double_type_size
= value
;
1813 case OPT_msched_costly_dep_
:
1814 rs6000_sched_costly_dep_str
= arg
;
1818 rs6000_explicit_options
.alignment
= true;
1819 if (! strcmp (arg
, "power"))
1821 /* On 64-bit Darwin, power alignment is ABI-incompatible with
1822 some C library functions, so warn about it. The flag may be
1823 useful for performance studies from time to time though, so
1824 don't disable it entirely. */
1825 if (DEFAULT_ABI
== ABI_DARWIN
&& TARGET_64BIT
)
1826 warning (0, "-malign-power is not supported for 64-bit Darwin;"
1827 " it is incompatible with the installed C and C++ libraries");
1828 rs6000_alignment_flags
= MASK_ALIGN_POWER
;
1830 else if (! strcmp (arg
, "natural"))
1831 rs6000_alignment_flags
= MASK_ALIGN_NATURAL
;
1834 error ("unknown -malign-XXXXX option specified: '%s'", arg
);
1842 /* Do anything needed at the start of the asm file. */
1845 rs6000_file_start (void)
1849 const char *start
= buffer
;
1850 struct rs6000_cpu_select
*ptr
;
1851 const char *default_cpu
= TARGET_CPU_DEFAULT
;
1852 FILE *file
= asm_out_file
;
1854 default_file_start ();
1856 #ifdef TARGET_BI_ARCH
1857 if ((TARGET_DEFAULT
^ target_flags
) & MASK_64BIT
)
1861 if (flag_verbose_asm
)
1863 sprintf (buffer
, "\n%s rs6000/powerpc options:", ASM_COMMENT_START
);
1864 rs6000_select
[0].string
= default_cpu
;
1866 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
1868 ptr
= &rs6000_select
[i
];
1869 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
1871 fprintf (file
, "%s %s%s", start
, ptr
->name
, ptr
->string
);
1876 if (PPC405_ERRATUM77
)
1878 fprintf (file
, "%s PPC405CR_ERRATUM77", start
);
1882 #ifdef USING_ELFOS_H
1883 switch (rs6000_sdata
)
1885 case SDATA_NONE
: fprintf (file
, "%s -msdata=none", start
); start
= ""; break;
1886 case SDATA_DATA
: fprintf (file
, "%s -msdata=data", start
); start
= ""; break;
1887 case SDATA_SYSV
: fprintf (file
, "%s -msdata=sysv", start
); start
= ""; break;
1888 case SDATA_EABI
: fprintf (file
, "%s -msdata=eabi", start
); start
= ""; break;
1891 if (rs6000_sdata
&& g_switch_value
)
1893 fprintf (file
, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED
, start
,
1903 if (DEFAULT_ABI
== ABI_AIX
|| (TARGET_ELF
&& flag_pic
== 2))
1911 /* Return nonzero if this function is known to have a null epilogue. */
1914 direct_return (void)
1916 if (reload_completed
)
1918 rs6000_stack_t
*info
= rs6000_stack_info ();
1920 if (info
->first_gp_reg_save
== 32
1921 && info
->first_fp_reg_save
== 64
1922 && info
->first_altivec_reg_save
== LAST_ALTIVEC_REGNO
+ 1
1923 && ! info
->lr_save_p
1924 && ! info
->cr_save_p
1925 && info
->vrsave_mask
== 0
1933 /* Return the number of instructions it takes to form a constant in an
1934 integer register. */
1937 num_insns_constant_wide (HOST_WIDE_INT value
)
1939 /* signed constant loadable with {cal|addi} */
1940 if (CONST_OK_FOR_LETTER_P (value
, 'I'))
1943 /* constant loadable with {cau|addis} */
1944 else if (CONST_OK_FOR_LETTER_P (value
, 'L'))
1947 #if HOST_BITS_PER_WIDE_INT == 64
1948 else if (TARGET_POWERPC64
)
1950 HOST_WIDE_INT low
= ((value
& 0xffffffff) ^ 0x80000000) - 0x80000000;
1951 HOST_WIDE_INT high
= value
>> 31;
1953 if (high
== 0 || high
== -1)
1959 return num_insns_constant_wide (high
) + 1;
1961 return (num_insns_constant_wide (high
)
1962 + num_insns_constant_wide (low
) + 1);
1971 num_insns_constant (rtx op
, enum machine_mode mode
)
1973 HOST_WIDE_INT low
, high
;
1975 switch (GET_CODE (op
))
1978 #if HOST_BITS_PER_WIDE_INT == 64
1979 if ((INTVAL (op
) >> 31) != 0 && (INTVAL (op
) >> 31) != -1
1980 && mask_operand (op
, mode
))
1984 return num_insns_constant_wide (INTVAL (op
));
1992 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
1993 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
1994 return num_insns_constant_wide ((HOST_WIDE_INT
) l
);
1997 if (mode
== VOIDmode
|| mode
== DImode
)
1999 high
= CONST_DOUBLE_HIGH (op
);
2000 low
= CONST_DOUBLE_LOW (op
);
2007 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
2008 REAL_VALUE_TO_TARGET_DOUBLE (rv
, l
);
2009 high
= l
[WORDS_BIG_ENDIAN
== 0];
2010 low
= l
[WORDS_BIG_ENDIAN
!= 0];
2014 return (num_insns_constant_wide (low
)
2015 + num_insns_constant_wide (high
));
2018 if ((high
== 0 && low
>= 0)
2019 || (high
== -1 && low
< 0))
2020 return num_insns_constant_wide (low
);
2022 else if (mask_operand (op
, mode
))
2026 return num_insns_constant_wide (high
) + 1;
2029 return (num_insns_constant_wide (high
)
2030 + num_insns_constant_wide (low
) + 1);
2038 /* Returns the constant for the splat instruction, if exists. */
2041 easy_vector_splat_const (int cst
, enum machine_mode mode
)
2046 if (EASY_VECTOR_15 (cst
)
2047 || EASY_VECTOR_15_ADD_SELF (cst
))
2049 if ((cst
& 0xffff) != ((cst
>> 16) & 0xffff))
2055 if (EASY_VECTOR_15 (cst
)
2056 || EASY_VECTOR_15_ADD_SELF (cst
))
2058 if ((cst
& 0xff) != ((cst
>> 8) & 0xff))
2064 if (EASY_VECTOR_15 (cst
)
2065 || EASY_VECTOR_15_ADD_SELF (cst
))
2073 /* Return nonzero if all elements of a vector have the same value. */
2076 easy_vector_same (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2080 units
= CONST_VECTOR_NUNITS (op
);
2082 cst
= INTVAL (CONST_VECTOR_ELT (op
, 0));
2083 for (i
= 1; i
< units
; ++i
)
2084 if (INTVAL (CONST_VECTOR_ELT (op
, i
)) != cst
)
2086 if (i
== units
&& easy_vector_splat_const (cst
, mode
))
2091 /* Generate easy_vector_constant out of a easy_vector_constant_add_self. */
2094 gen_easy_vector_constant_add_self (rtx op
)
2098 units
= GET_MODE_NUNITS (GET_MODE (op
));
2099 v
= rtvec_alloc (units
);
2101 for (i
= 0; i
< units
; i
++)
2103 GEN_INT (INTVAL (CONST_VECTOR_ELT (op
, i
)) >> 1);
2104 return gen_rtx_raw_CONST_VECTOR (GET_MODE (op
), v
);
2108 output_vec_const_move (rtx
*operands
)
2111 enum machine_mode mode
;
2117 cst
= INTVAL (CONST_VECTOR_ELT (vec
, 0));
2118 cst2
= INTVAL (CONST_VECTOR_ELT (vec
, 1));
2119 mode
= GET_MODE (dest
);
2123 if (zero_constant (vec
, mode
))
2124 return "vxor %0,%0,%0";
2126 gcc_assert (easy_vector_constant (vec
, mode
));
2128 operands
[1] = GEN_INT (cst
);
2132 if (EASY_VECTOR_15 (cst
))
2134 operands
[1] = GEN_INT (cst
);
2135 return "vspltisw %0,%1";
2137 else if (EASY_VECTOR_15_ADD_SELF (cst
))
2143 if (EASY_VECTOR_15 (cst
))
2145 operands
[1] = GEN_INT (cst
);
2146 return "vspltish %0,%1";
2148 else if (EASY_VECTOR_15_ADD_SELF (cst
))
2154 if (EASY_VECTOR_15 (cst
))
2156 operands
[1] = GEN_INT (cst
);
2157 return "vspltisb %0,%1";
2159 else if (EASY_VECTOR_15_ADD_SELF (cst
))
2167 gcc_assert (TARGET_SPE
);
2169 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2170 pattern of V1DI, V4HI, and V2SF.
2172 FIXME: We should probably return # and add post reload
2173 splitters for these, but this way is so easy ;-). */
2174 operands
[1] = GEN_INT (cst
);
2175 operands
[2] = GEN_INT (cst2
);
2177 return "li %0,%1\n\tevmergelo %0,%0,%0";
2179 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
2183 mask64_1or2_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
,
2186 if (GET_CODE (op
) == CONST_INT
)
2188 HOST_WIDE_INT c
, lsb
;
2193 /* Disallow all zeros. */
2197 /* We can use a single rlwinm insn if no upper bits of C are set
2198 AND there are zero, one or two transitions in the _whole_ of
2200 one_ok
= !(c
& ~(HOST_WIDE_INT
)0xffffffff);
2202 /* We don't change the number of transitions by inverting,
2203 so make sure we start with the LS bit zero. */
2207 /* Find the first transition. */
2210 /* Invert to look for a second transition. */
2213 /* Erase first transition. */
2216 /* Find the second transition. */
2219 /* Invert to look for a third transition. */
2222 /* Erase second transition. */
2225 if (one_ok
&& !(allow_one
|| c
))
2228 /* Find the third transition (if any). */
2231 /* Match if all the bits above are 1's (or c is zero). */
2237 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2238 implement ANDing by the mask IN. */
2240 build_mask64_2_operands (rtx in
, rtx
*out
)
2242 #if HOST_BITS_PER_WIDE_INT >= 64
2243 unsigned HOST_WIDE_INT c
, lsb
, m1
, m2
;
2246 gcc_assert (GET_CODE (in
) == CONST_INT
);
2251 /* Assume c initially something like 0x00fff000000fffff. The idea
2252 is to rotate the word so that the middle ^^^^^^ group of zeros
2253 is at the MS end and can be cleared with an rldicl mask. We then
2254 rotate back and clear off the MS ^^ group of zeros with a
2256 c
= ~c
; /* c == 0xff000ffffff00000 */
2257 lsb
= c
& -c
; /* lsb == 0x0000000000100000 */
2258 m1
= -lsb
; /* m1 == 0xfffffffffff00000 */
2259 c
= ~c
; /* c == 0x00fff000000fffff */
2260 c
&= -lsb
; /* c == 0x00fff00000000000 */
2261 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
2262 c
= ~c
; /* c == 0xff000fffffffffff */
2263 c
&= -lsb
; /* c == 0xff00000000000000 */
2265 while ((lsb
>>= 1) != 0)
2266 shift
++; /* shift == 44 on exit from loop */
2267 m1
<<= 64 - shift
; /* m1 == 0xffffff0000000000 */
2268 m1
= ~m1
; /* m1 == 0x000000ffffffffff */
2269 m2
= ~c
; /* m2 == 0x00ffffffffffffff */
2273 /* Assume c initially something like 0xff000f0000000000. The idea
2274 is to rotate the word so that the ^^^ middle group of zeros
2275 is at the LS end and can be cleared with an rldicr mask. We then
2276 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2278 lsb
= c
& -c
; /* lsb == 0x0000010000000000 */
2279 m2
= -lsb
; /* m2 == 0xffffff0000000000 */
2280 c
= ~c
; /* c == 0x00fff0ffffffffff */
2281 c
&= -lsb
; /* c == 0x00fff00000000000 */
2282 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
2283 c
= ~c
; /* c == 0xff000fffffffffff */
2284 c
&= -lsb
; /* c == 0xff00000000000000 */
2286 while ((lsb
>>= 1) != 0)
2287 shift
++; /* shift == 44 on exit from loop */
2288 m1
= ~c
; /* m1 == 0x00ffffffffffffff */
2289 m1
>>= shift
; /* m1 == 0x0000000000000fff */
2290 m1
= ~m1
; /* m1 == 0xfffffffffffff000 */
2293 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2294 masks will be all 1's. We are guaranteed more than one transition. */
2295 out
[0] = GEN_INT (64 - shift
);
2296 out
[1] = GEN_INT (m1
);
2297 out
[2] = GEN_INT (shift
);
2298 out
[3] = GEN_INT (m2
);
2306 /* Return TRUE if OP is an invalid SUBREG operation on the e500. */
2309 invalid_e500_subreg (rtx op
, enum machine_mode mode
)
2311 /* Reject (subreg:SI (reg:DF)). */
2312 if (GET_CODE (op
) == SUBREG
2314 && REG_P (SUBREG_REG (op
))
2315 && GET_MODE (SUBREG_REG (op
)) == DFmode
)
2318 /* Reject (subreg:DF (reg:DI)). */
2319 if (GET_CODE (op
) == SUBREG
2321 && REG_P (SUBREG_REG (op
))
2322 && GET_MODE (SUBREG_REG (op
)) == DImode
)
2328 /* Darwin, AIX increases natural record alignment to doubleword if the first
2329 field is an FP double while the FP fields remain word aligned. */
2332 rs6000_special_round_type_align (tree type
, int computed
, int specified
)
2334 tree field
= TYPE_FIELDS (type
);
2336 /* Skip all non field decls */
2337 while (field
!= NULL
&& TREE_CODE (field
) != FIELD_DECL
)
2338 field
= TREE_CHAIN (field
);
2340 if (field
== NULL
|| field
== type
|| DECL_MODE (field
) != DFmode
)
2341 return MAX (computed
, specified
);
2343 return MAX (MAX (computed
, specified
), 64);
2346 /* Return 1 for an operand in small memory on V.4/eabi. */
2349 small_data_operand (rtx op ATTRIBUTE_UNUSED
,
2350 enum machine_mode mode ATTRIBUTE_UNUSED
)
2355 if (rs6000_sdata
== SDATA_NONE
|| rs6000_sdata
== SDATA_DATA
)
2358 if (DEFAULT_ABI
!= ABI_V4
)
2361 if (GET_CODE (op
) == SYMBOL_REF
)
2364 else if (GET_CODE (op
) != CONST
2365 || GET_CODE (XEXP (op
, 0)) != PLUS
2366 || GET_CODE (XEXP (XEXP (op
, 0), 0)) != SYMBOL_REF
2367 || GET_CODE (XEXP (XEXP (op
, 0), 1)) != CONST_INT
)
2372 rtx sum
= XEXP (op
, 0);
2373 HOST_WIDE_INT summand
;
2375 /* We have to be careful here, because it is the referenced address
2376 that must be 32k from _SDA_BASE_, not just the symbol. */
2377 summand
= INTVAL (XEXP (sum
, 1));
2378 if (summand
< 0 || (unsigned HOST_WIDE_INT
) summand
> g_switch_value
)
2381 sym_ref
= XEXP (sum
, 0);
2384 return SYMBOL_REF_SMALL_P (sym_ref
);
2390 /* Return true if either operand is a general purpose register. */
2393 gpr_or_gpr_p (rtx op0
, rtx op1
)
2395 return ((REG_P (op0
) && INT_REGNO_P (REGNO (op0
)))
2396 || (REG_P (op1
) && INT_REGNO_P (REGNO (op1
))));
2400 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2403 constant_pool_expr_1 (rtx op
, int *have_sym
, int *have_toc
)
2405 switch (GET_CODE (op
))
2408 if (RS6000_SYMBOL_REF_TLS_P (op
))
2410 else if (CONSTANT_POOL_ADDRESS_P (op
))
2412 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op
), Pmode
))
2420 else if (! strcmp (XSTR (op
, 0), toc_label_name
))
2429 return (constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
)
2430 && constant_pool_expr_1 (XEXP (op
, 1), have_sym
, have_toc
));
2432 return constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
);
2441 constant_pool_expr_p (rtx op
)
2445 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_sym
;
2449 toc_relative_expr_p (rtx op
)
2453 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_toc
;
2457 legitimate_constant_pool_address_p (rtx x
)
2460 && GET_CODE (x
) == PLUS
2461 && GET_CODE (XEXP (x
, 0)) == REG
2462 && (TARGET_MINIMAL_TOC
|| REGNO (XEXP (x
, 0)) == TOC_REGISTER
)
2463 && constant_pool_expr_p (XEXP (x
, 1)));
2467 legitimate_small_data_p (enum machine_mode mode
, rtx x
)
2469 return (DEFAULT_ABI
== ABI_V4
2470 && !flag_pic
&& !TARGET_TOC
2471 && (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
)
2472 && small_data_operand (x
, mode
));
2475 /* SPE offset addressing is limited to 5-bits worth of double words. */
2476 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2479 rs6000_legitimate_offset_address_p (enum machine_mode mode
, rtx x
, int strict
)
2481 unsigned HOST_WIDE_INT offset
, extra
;
2483 if (GET_CODE (x
) != PLUS
)
2485 if (GET_CODE (XEXP (x
, 0)) != REG
)
2487 if (!INT_REG_OK_FOR_BASE_P (XEXP (x
, 0), strict
))
2489 if (legitimate_constant_pool_address_p (x
))
2491 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
2494 offset
= INTVAL (XEXP (x
, 1));
2502 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2503 which leaves the only valid constant offset of zero, which by
2504 canonicalization rules is also invalid. */
2511 /* SPE vector modes. */
2512 return SPE_CONST_OFFSET_OK (offset
);
2515 if (TARGET_E500_DOUBLE
)
2516 return SPE_CONST_OFFSET_OK (offset
);
2519 /* On e500v2, we may have:
2521 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
2523 Which gets addressed with evldd instructions. */
2524 if (TARGET_E500_DOUBLE
)
2525 return SPE_CONST_OFFSET_OK (offset
);
2527 if (mode
== DFmode
|| !TARGET_POWERPC64
)
2529 else if (offset
& 3)
2535 if (mode
== TFmode
|| !TARGET_POWERPC64
)
2537 else if (offset
& 3)
2548 return (offset
< 0x10000) && (offset
+ extra
< 0x10000);
2552 legitimate_indexed_address_p (rtx x
, int strict
)
2556 if (GET_CODE (x
) != PLUS
)
2562 if (!REG_P (op0
) || !REG_P (op1
))
2565 return ((INT_REG_OK_FOR_BASE_P (op0
, strict
)
2566 && INT_REG_OK_FOR_INDEX_P (op1
, strict
))
2567 || (INT_REG_OK_FOR_BASE_P (op1
, strict
)
2568 && INT_REG_OK_FOR_INDEX_P (op0
, strict
)));
2572 legitimate_indirect_address_p (rtx x
, int strict
)
2574 return GET_CODE (x
) == REG
&& INT_REG_OK_FOR_BASE_P (x
, strict
);
2578 macho_lo_sum_memory_operand (rtx x
, enum machine_mode mode
)
2580 if (!TARGET_MACHO
|| !flag_pic
2581 || mode
!= SImode
|| GET_CODE (x
) != MEM
)
2585 if (GET_CODE (x
) != LO_SUM
)
2587 if (GET_CODE (XEXP (x
, 0)) != REG
)
2589 if (!INT_REG_OK_FOR_BASE_P (XEXP (x
, 0), 0))
2593 return CONSTANT_P (x
);
2597 legitimate_lo_sum_address_p (enum machine_mode mode
, rtx x
, int strict
)
2599 if (GET_CODE (x
) != LO_SUM
)
2601 if (GET_CODE (XEXP (x
, 0)) != REG
)
2603 if (!INT_REG_OK_FOR_BASE_P (XEXP (x
, 0), strict
))
2605 /* Restrict addressing for DI because of our SUBREG hackery. */
2606 if (TARGET_E500_DOUBLE
&& (mode
== DFmode
|| mode
== DImode
))
2610 if (TARGET_ELF
|| TARGET_MACHO
)
2612 if (DEFAULT_ABI
!= ABI_AIX
&& DEFAULT_ABI
!= ABI_DARWIN
&& flag_pic
)
2616 if (GET_MODE_NUNITS (mode
) != 1)
2618 if (GET_MODE_BITSIZE (mode
) > 64
2619 || (GET_MODE_BITSIZE (mode
) > 32 && !TARGET_POWERPC64
2620 && !(TARGET_HARD_FLOAT
&& TARGET_FPRS
&& mode
== DFmode
)))
2623 return CONSTANT_P (x
);
2630 /* Try machine-dependent ways of modifying an illegitimate address
2631 to be legitimate. If we find one, return the new, valid address.
2632 This is used from only one place: `memory_address' in explow.c.
2634 OLDX is the address as it was before break_out_memory_refs was
2635 called. In some cases it is useful to look at this to decide what
2638 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2640 It is always safe for this function to do nothing. It exists to
2641 recognize opportunities to optimize the output.
2643 On RS/6000, first check for the sum of a register with a constant
2644 integer that is out of range. If so, generate code to add the
2645 constant with the low-order 16 bits masked to the register and force
2646 this result into another register (this can be done with `cau').
2647 Then generate an address of REG+(CONST&0xffff), allowing for the
2648 possibility of bit 16 being a one.
2650 Then check for the sum of a register and something not constant, try to
2651 load the other things into a register and return the sum. */
2654 rs6000_legitimize_address (rtx x
, rtx oldx ATTRIBUTE_UNUSED
,
2655 enum machine_mode mode
)
2657 if (GET_CODE (x
) == SYMBOL_REF
)
2659 enum tls_model model
= SYMBOL_REF_TLS_MODEL (x
);
2661 return rs6000_legitimize_tls_address (x
, model
);
2664 if (GET_CODE (x
) == PLUS
2665 && GET_CODE (XEXP (x
, 0)) == REG
2666 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2667 && (unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1)) + 0x8000) >= 0x10000)
2669 HOST_WIDE_INT high_int
, low_int
;
2671 low_int
= ((INTVAL (XEXP (x
, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2672 high_int
= INTVAL (XEXP (x
, 1)) - low_int
;
2673 sum
= force_operand (gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2674 GEN_INT (high_int
)), 0);
2675 return gen_rtx_PLUS (Pmode
, sum
, GEN_INT (low_int
));
2677 else if (GET_CODE (x
) == PLUS
2678 && GET_CODE (XEXP (x
, 0)) == REG
2679 && GET_CODE (XEXP (x
, 1)) != CONST_INT
2680 && GET_MODE_NUNITS (mode
) == 1
2681 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
2683 || (((mode
!= DImode
&& mode
!= DFmode
) || TARGET_E500_DOUBLE
)
2685 && (TARGET_POWERPC64
|| mode
!= DImode
)
2688 return gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
2689 force_reg (Pmode
, force_operand (XEXP (x
, 1), 0)));
2691 else if (ALTIVEC_VECTOR_MODE (mode
))
2695 /* Make sure both operands are registers. */
2696 if (GET_CODE (x
) == PLUS
)
2697 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
2698 force_reg (Pmode
, XEXP (x
, 1)));
2700 reg
= force_reg (Pmode
, x
);
2703 else if (SPE_VECTOR_MODE (mode
)
2704 || (TARGET_E500_DOUBLE
&& (mode
== DFmode
2705 || mode
== DImode
)))
2709 /* We accept [reg + reg] and [reg + OFFSET]. */
2711 if (GET_CODE (x
) == PLUS
)
2713 rtx op1
= XEXP (x
, 0);
2714 rtx op2
= XEXP (x
, 1);
2716 op1
= force_reg (Pmode
, op1
);
2718 if (GET_CODE (op2
) != REG
2719 && (GET_CODE (op2
) != CONST_INT
2720 || !SPE_CONST_OFFSET_OK (INTVAL (op2
))))
2721 op2
= force_reg (Pmode
, op2
);
2723 return gen_rtx_PLUS (Pmode
, op1
, op2
);
2726 return force_reg (Pmode
, x
);
2732 && GET_CODE (x
) != CONST_INT
2733 && GET_CODE (x
) != CONST_DOUBLE
2735 && GET_MODE_NUNITS (mode
) == 1
2736 && (GET_MODE_BITSIZE (mode
) <= 32
2737 || ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) && mode
== DFmode
)))
2739 rtx reg
= gen_reg_rtx (Pmode
);
2740 emit_insn (gen_elf_high (reg
, x
));
2741 return gen_rtx_LO_SUM (Pmode
, reg
, x
);
2743 else if (TARGET_MACHO
&& TARGET_32BIT
&& TARGET_NO_TOC
2746 && ! MACHO_DYNAMIC_NO_PIC_P
2748 && GET_CODE (x
) != CONST_INT
2749 && GET_CODE (x
) != CONST_DOUBLE
2751 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) || mode
!= DFmode
)
2755 rtx reg
= gen_reg_rtx (Pmode
);
2756 emit_insn (gen_macho_high (reg
, x
));
2757 return gen_rtx_LO_SUM (Pmode
, reg
, x
);
2760 && constant_pool_expr_p (x
)
2761 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), Pmode
))
2763 return create_TOC_reference (x
);
2769 /* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
2770 We need to emit DTP-relative relocations. */
2773 rs6000_output_dwarf_dtprel (FILE *file
, int size
, rtx x
)
2778 fputs ("\t.long\t", file
);
2781 fputs (DOUBLE_INT_ASM_OP
, file
);
2786 output_addr_const (file
, x
);
2787 fputs ("@dtprel+0x8000", file
);
2790 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2792 static GTY(()) rtx rs6000_tls_symbol
;
2794 rs6000_tls_get_addr (void)
2796 if (!rs6000_tls_symbol
)
2797 rs6000_tls_symbol
= init_one_libfunc ("__tls_get_addr");
2799 return rs6000_tls_symbol
;
2802 /* Construct the SYMBOL_REF for TLS GOT references. */
2804 static GTY(()) rtx rs6000_got_symbol
;
2806 rs6000_got_sym (void)
2808 if (!rs6000_got_symbol
)
2810 rs6000_got_symbol
= gen_rtx_SYMBOL_REF (Pmode
, "_GLOBAL_OFFSET_TABLE_");
2811 SYMBOL_REF_FLAGS (rs6000_got_symbol
) |= SYMBOL_FLAG_LOCAL
;
2812 SYMBOL_REF_FLAGS (rs6000_got_symbol
) |= SYMBOL_FLAG_EXTERNAL
;
2815 return rs6000_got_symbol
;
2818 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2819 this (thread-local) address. */
2822 rs6000_legitimize_tls_address (rtx addr
, enum tls_model model
)
2826 dest
= gen_reg_rtx (Pmode
);
2827 if (model
== TLS_MODEL_LOCAL_EXEC
&& rs6000_tls_size
== 16)
2833 tlsreg
= gen_rtx_REG (Pmode
, 13);
2834 insn
= gen_tls_tprel_64 (dest
, tlsreg
, addr
);
2838 tlsreg
= gen_rtx_REG (Pmode
, 2);
2839 insn
= gen_tls_tprel_32 (dest
, tlsreg
, addr
);
2843 else if (model
== TLS_MODEL_LOCAL_EXEC
&& rs6000_tls_size
== 32)
2847 tmp
= gen_reg_rtx (Pmode
);
2850 tlsreg
= gen_rtx_REG (Pmode
, 13);
2851 insn
= gen_tls_tprel_ha_64 (tmp
, tlsreg
, addr
);
2855 tlsreg
= gen_rtx_REG (Pmode
, 2);
2856 insn
= gen_tls_tprel_ha_32 (tmp
, tlsreg
, addr
);
2860 insn
= gen_tls_tprel_lo_64 (dest
, tmp
, addr
);
2862 insn
= gen_tls_tprel_lo_32 (dest
, tmp
, addr
);
2867 rtx r3
, got
, tga
, tmp1
, tmp2
, eqv
;
2870 got
= gen_rtx_REG (Pmode
, TOC_REGISTER
);
2874 got
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
2877 rtx gsym
= rs6000_got_sym ();
2878 got
= gen_reg_rtx (Pmode
);
2880 rs6000_emit_move (got
, gsym
, Pmode
);
2883 rtx tempLR
, tmp3
, mem
;
2886 tempLR
= gen_reg_rtx (Pmode
);
2887 tmp1
= gen_reg_rtx (Pmode
);
2888 tmp2
= gen_reg_rtx (Pmode
);
2889 tmp3
= gen_reg_rtx (Pmode
);
2890 mem
= gen_const_mem (Pmode
, tmp1
);
2892 first
= emit_insn (gen_load_toc_v4_PIC_1b (tempLR
, gsym
));
2893 emit_move_insn (tmp1
, tempLR
);
2894 emit_move_insn (tmp2
, mem
);
2895 emit_insn (gen_addsi3 (tmp3
, tmp1
, tmp2
));
2896 last
= emit_move_insn (got
, tmp3
);
2897 REG_NOTES (last
) = gen_rtx_EXPR_LIST (REG_EQUAL
, gsym
,
2899 REG_NOTES (first
) = gen_rtx_INSN_LIST (REG_LIBCALL
, last
,
2901 REG_NOTES (last
) = gen_rtx_INSN_LIST (REG_RETVAL
, first
,
2907 if (model
== TLS_MODEL_GLOBAL_DYNAMIC
)
2909 r3
= gen_rtx_REG (Pmode
, 3);
2911 insn
= gen_tls_gd_64 (r3
, got
, addr
);
2913 insn
= gen_tls_gd_32 (r3
, got
, addr
);
2916 tga
= gen_rtx_MEM (Pmode
, rs6000_tls_get_addr ());
2917 insn
= gen_call_value (r3
, tga
, const0_rtx
, const0_rtx
);
2918 insn
= emit_call_insn (insn
);
2919 CONST_OR_PURE_CALL_P (insn
) = 1;
2920 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), r3
);
2921 insn
= get_insns ();
2923 emit_libcall_block (insn
, dest
, r3
, addr
);
2925 else if (model
== TLS_MODEL_LOCAL_DYNAMIC
)
2927 r3
= gen_rtx_REG (Pmode
, 3);
2929 insn
= gen_tls_ld_64 (r3
, got
);
2931 insn
= gen_tls_ld_32 (r3
, got
);
2934 tga
= gen_rtx_MEM (Pmode
, rs6000_tls_get_addr ());
2935 insn
= gen_call_value (r3
, tga
, const0_rtx
, const0_rtx
);
2936 insn
= emit_call_insn (insn
);
2937 CONST_OR_PURE_CALL_P (insn
) = 1;
2938 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), r3
);
2939 insn
= get_insns ();
2941 tmp1
= gen_reg_rtx (Pmode
);
2942 eqv
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
),
2944 emit_libcall_block (insn
, tmp1
, r3
, eqv
);
2945 if (rs6000_tls_size
== 16)
2948 insn
= gen_tls_dtprel_64 (dest
, tmp1
, addr
);
2950 insn
= gen_tls_dtprel_32 (dest
, tmp1
, addr
);
2952 else if (rs6000_tls_size
== 32)
2954 tmp2
= gen_reg_rtx (Pmode
);
2956 insn
= gen_tls_dtprel_ha_64 (tmp2
, tmp1
, addr
);
2958 insn
= gen_tls_dtprel_ha_32 (tmp2
, tmp1
, addr
);
2961 insn
= gen_tls_dtprel_lo_64 (dest
, tmp2
, addr
);
2963 insn
= gen_tls_dtprel_lo_32 (dest
, tmp2
, addr
);
2967 tmp2
= gen_reg_rtx (Pmode
);
2969 insn
= gen_tls_got_dtprel_64 (tmp2
, got
, addr
);
2971 insn
= gen_tls_got_dtprel_32 (tmp2
, got
, addr
);
2973 insn
= gen_rtx_SET (Pmode
, dest
,
2974 gen_rtx_PLUS (Pmode
, tmp2
, tmp1
));
2980 /* IE, or 64 bit offset LE. */
2981 tmp2
= gen_reg_rtx (Pmode
);
2983 insn
= gen_tls_got_tprel_64 (tmp2
, got
, addr
);
2985 insn
= gen_tls_got_tprel_32 (tmp2
, got
, addr
);
2988 insn
= gen_tls_tls_64 (dest
, tmp2
, addr
);
2990 insn
= gen_tls_tls_32 (dest
, tmp2
, addr
);
2998 /* Return 1 if X contains a thread-local symbol. */
3001 rs6000_tls_referenced_p (rtx x
)
3003 if (! TARGET_HAVE_TLS
)
3006 return for_each_rtx (&x
, &rs6000_tls_symbol_ref_1
, 0);
3009 /* Return 1 if *X is a thread-local symbol. This is the same as
3010 rs6000_tls_symbol_ref except for the type of the unused argument. */
3013 rs6000_tls_symbol_ref_1 (rtx
*x
, void *data ATTRIBUTE_UNUSED
)
3015 return RS6000_SYMBOL_REF_TLS_P (*x
);
3018 /* The convention appears to be to define this wherever it is used.
3019 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3020 is now used here. */
3021 #ifndef REG_MODE_OK_FOR_BASE_P
3022 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3025 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3026 replace the input X, or the original X if no replacement is called for.
3027 The output parameter *WIN is 1 if the calling macro should goto WIN,
3030 For RS/6000, we wish to handle large displacements off a base
3031 register by splitting the addend across an addiu/addis and the mem insn.
3032 This cuts number of extra insns needed from 3 to 1.
3034 On Darwin, we use this to generate code for floating point constants.
3035 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3036 The Darwin code is inside #if TARGET_MACHO because only then is
3037 machopic_function_base_name() defined. */
3039 rs6000_legitimize_reload_address (rtx x
, enum machine_mode mode
,
3040 int opnum
, int type
,
3041 int ind_levels ATTRIBUTE_UNUSED
, int *win
)
3043 /* We must recognize output that we have already generated ourselves. */
3044 if (GET_CODE (x
) == PLUS
3045 && GET_CODE (XEXP (x
, 0)) == PLUS
3046 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
3047 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3048 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3050 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
3051 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
3052 opnum
, (enum reload_type
)type
);
3058 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
3059 && GET_CODE (x
) == LO_SUM
3060 && GET_CODE (XEXP (x
, 0)) == PLUS
3061 && XEXP (XEXP (x
, 0), 0) == pic_offset_table_rtx
3062 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == HIGH
3063 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 0)) == CONST
3064 && XEXP (XEXP (XEXP (x
, 0), 1), 0) == XEXP (x
, 1)
3065 && GET_CODE (XEXP (XEXP (x
, 1), 0)) == MINUS
3066 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == SYMBOL_REF
3067 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == SYMBOL_REF
)
3069 /* Result of previous invocation of this function on Darwin
3070 floating point constant. */
3071 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
3072 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
3073 opnum
, (enum reload_type
)type
);
3079 /* Force ld/std non-word aligned offset into base register by wrapping
3081 if (GET_CODE (x
) == PLUS
3082 && GET_CODE (XEXP (x
, 0)) == REG
3083 && REGNO (XEXP (x
, 0)) < 32
3084 && REG_MODE_OK_FOR_BASE_P (XEXP (x
, 0), mode
)
3085 && GET_CODE (XEXP (x
, 1)) == CONST_INT
3086 && (INTVAL (XEXP (x
, 1)) & 3) != 0
3087 && !ALTIVEC_VECTOR_MODE (mode
)
3088 && GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
3089 && TARGET_POWERPC64
)
3091 x
= gen_rtx_PLUS (GET_MODE (x
), x
, GEN_INT (0));
3092 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
3093 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
3094 opnum
, (enum reload_type
) type
);
3099 if (GET_CODE (x
) == PLUS
3100 && GET_CODE (XEXP (x
, 0)) == REG
3101 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
3102 && REG_MODE_OK_FOR_BASE_P (XEXP (x
, 0), mode
)
3103 && GET_CODE (XEXP (x
, 1)) == CONST_INT
3104 && !SPE_VECTOR_MODE (mode
)
3105 && !(TARGET_E500_DOUBLE
&& (mode
== DFmode
3107 && !ALTIVEC_VECTOR_MODE (mode
))
3109 HOST_WIDE_INT val
= INTVAL (XEXP (x
, 1));
3110 HOST_WIDE_INT low
= ((val
& 0xffff) ^ 0x8000) - 0x8000;
3112 = (((val
- low
) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3114 /* Check for 32-bit overflow. */
3115 if (high
+ low
!= val
)
3121 /* Reload the high part into a base reg; leave the low part
3122 in the mem directly. */
3124 x
= gen_rtx_PLUS (GET_MODE (x
),
3125 gen_rtx_PLUS (GET_MODE (x
), XEXP (x
, 0),
3129 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
3130 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
3131 opnum
, (enum reload_type
)type
);
3137 if (GET_CODE (x
) == SYMBOL_REF
3138 && DEFAULT_ABI
== ABI_DARWIN
3139 && !ALTIVEC_VECTOR_MODE (mode
)
3140 && (flag_pic
|| MACHO_DYNAMIC_NO_PIC_P
)
3141 /* Don't do this for TFmode, since the result isn't offsettable.
3142 The same goes for DImode without 64-bit gprs. */
3144 && (mode
!= DImode
|| TARGET_POWERPC64
))
3148 rtx offset
= gen_rtx_CONST (Pmode
,
3149 gen_rtx_MINUS (Pmode
, x
,
3150 machopic_function_base_sym ()));
3151 x
= gen_rtx_LO_SUM (GET_MODE (x
),
3152 gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
,
3153 gen_rtx_HIGH (Pmode
, offset
)), offset
);
3156 x
= gen_rtx_LO_SUM (GET_MODE (x
),
3157 gen_rtx_HIGH (Pmode
, x
), x
);
3159 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
3160 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
3161 opnum
, (enum reload_type
)type
);
3168 && constant_pool_expr_p (x
)
3169 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), mode
))
3171 (x
) = create_TOC_reference (x
);
3179 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3180 that is a valid memory address for an instruction.
3181 The MODE argument is the machine mode for the MEM expression
3182 that wants to use this address.
3184 On the RS/6000, there are four valid address: a SYMBOL_REF that
3185 refers to a constant pool entry of an address (or the sum of it
3186 plus a constant), a short (16-bit signed) constant plus a register,
3187 the sum of two registers, or a register indirect, possibly with an
3188 auto-increment. For DFmode and DImode with a constant plus register,
3189 we must ensure that both words are addressable or PowerPC64 with offset
3192 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3193 32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
3194 adjacent memory cells are accessed by adding word-sized offsets
3195 during assembly output. */
3197 rs6000_legitimate_address (enum machine_mode mode
, rtx x
, int reg_ok_strict
)
3199 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
3201 && ALTIVEC_VECTOR_MODE (mode
)
3202 && GET_CODE (x
) == AND
3203 && GET_CODE (XEXP (x
, 1)) == CONST_INT
3204 && INTVAL (XEXP (x
, 1)) == -16)
3207 if (RS6000_SYMBOL_REF_TLS_P (x
))
3209 if (legitimate_indirect_address_p (x
, reg_ok_strict
))
3211 if ((GET_CODE (x
) == PRE_INC
|| GET_CODE (x
) == PRE_DEC
)
3212 && !ALTIVEC_VECTOR_MODE (mode
)
3213 && !SPE_VECTOR_MODE (mode
)
3214 /* Restrict addressing for DI because of our SUBREG hackery. */
3215 && !(TARGET_E500_DOUBLE
&& (mode
== DFmode
|| mode
== DImode
))
3217 && legitimate_indirect_address_p (XEXP (x
, 0), reg_ok_strict
))
3219 if (legitimate_small_data_p (mode
, x
))
3221 if (legitimate_constant_pool_address_p (x
))
3223 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3225 && GET_CODE (x
) == PLUS
3226 && GET_CODE (XEXP (x
, 0)) == REG
3227 && (XEXP (x
, 0) == virtual_stack_vars_rtx
3228 || XEXP (x
, 0) == arg_pointer_rtx
)
3229 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3231 if (rs6000_legitimate_offset_address_p (mode
, x
, reg_ok_strict
))
3235 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3237 || ((mode
!= DFmode
|| TARGET_E500_DOUBLE
) && mode
!= TFmode
))
3238 && (TARGET_POWERPC64
|| mode
!= DImode
)
3239 && legitimate_indexed_address_p (x
, reg_ok_strict
))
3241 if (legitimate_lo_sum_address_p (mode
, x
, reg_ok_strict
))
3246 /* Go to LABEL if ADDR (a legitimate address expression)
3247 has an effect that depends on the machine mode it is used for.
3249 On the RS/6000 this is true of all integral offsets (since AltiVec
3250 modes don't allow them) or is a pre-increment or decrement.
3252 ??? Except that due to conceptual problems in offsettable_address_p
3253 we can't really report the problems of integral offsets. So leave
3254 this assuming that the adjustable offset must be valid for the
3255 sub-words of a TFmode operand, which is what we had before. */
3258 rs6000_mode_dependent_address (rtx addr
)
3260 switch (GET_CODE (addr
))
3263 if (GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
3265 unsigned HOST_WIDE_INT val
= INTVAL (XEXP (addr
, 1));
3266 return val
+ 12 + 0x8000 >= 0x10000;
3275 return TARGET_UPDATE
;
3284 /* Return number of consecutive hard regs needed starting at reg REGNO
3285 to hold something of mode MODE.
3286 This is ordinarily the length in words of a value of mode MODE
3287 but can be less for certain modes in special long registers.
3289 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3290 scalar instructions. The upper 32 bits are only available to the
3293 POWER and PowerPC GPRs hold 32 bits worth;
3294 PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
3297 rs6000_hard_regno_nregs (int regno
, enum machine_mode mode
)
3299 if (FP_REGNO_P (regno
))
3300 return (GET_MODE_SIZE (mode
) + UNITS_PER_FP_WORD
- 1) / UNITS_PER_FP_WORD
;
3302 if (TARGET_E500_DOUBLE
&& mode
== DFmode
)
3305 if (SPE_SIMD_REGNO_P (regno
) && TARGET_SPE
&& SPE_VECTOR_MODE (mode
))
3306 return (GET_MODE_SIZE (mode
) + UNITS_PER_SPE_WORD
- 1) / UNITS_PER_SPE_WORD
;
3308 if (ALTIVEC_REGNO_P (regno
))
3310 (GET_MODE_SIZE (mode
) + UNITS_PER_ALTIVEC_WORD
- 1) / UNITS_PER_ALTIVEC_WORD
;
3312 return (GET_MODE_SIZE (mode
) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
3315 /* Change register usage conditional on target flags. */
3317 rs6000_conditional_register_usage (void)
3321 /* Set MQ register fixed (already call_used) if not POWER
3322 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
3327 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
3329 fixed_regs
[13] = call_used_regs
[13]
3330 = call_really_used_regs
[13] = 1;
3332 /* Conditionally disable FPRs. */
3333 if (TARGET_SOFT_FLOAT
|| !TARGET_FPRS
)
3334 for (i
= 32; i
< 64; i
++)
3335 fixed_regs
[i
] = call_used_regs
[i
]
3336 = call_really_used_regs
[i
] = 1;
3338 /* The TOC register is not killed across calls in a way that is
3339 visible to the compiler. */
3340 if (DEFAULT_ABI
== ABI_AIX
)
3341 call_really_used_regs
[2] = 0;
3343 if (DEFAULT_ABI
== ABI_V4
3344 && PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
3346 fixed_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
3348 if (DEFAULT_ABI
== ABI_V4
3349 && PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
3351 fixed_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
]
3352 = call_used_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
]
3353 = call_really_used_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
3355 if (DEFAULT_ABI
== ABI_DARWIN
3356 && PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
)
3357 fixed_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
]
3358 = call_used_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
]
3359 = call_really_used_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
3361 if (TARGET_TOC
&& TARGET_MINIMAL_TOC
)
3362 fixed_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
]
3363 = call_used_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
3366 global_regs
[VSCR_REGNO
] = 1;
3370 global_regs
[SPEFSCR_REGNO
] = 1;
3371 fixed_regs
[FIXED_SCRATCH
]
3372 = call_used_regs
[FIXED_SCRATCH
]
3373 = call_really_used_regs
[FIXED_SCRATCH
] = 1;
3376 if (! TARGET_ALTIVEC
)
3378 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
3379 fixed_regs
[i
] = call_used_regs
[i
] = call_really_used_regs
[i
] = 1;
3380 call_really_used_regs
[VRSAVE_REGNO
] = 1;
3383 if (TARGET_ALTIVEC_ABI
)
3384 for (i
= FIRST_ALTIVEC_REGNO
; i
< FIRST_ALTIVEC_REGNO
+ 20; ++i
)
3385 call_used_regs
[i
] = call_really_used_regs
[i
] = 1;
3388 /* Try to output insns to set TARGET equal to the constant C if it can
3389 be done in less than N insns. Do all computations in MODE.
3390 Returns the place where the output has been placed if it can be
3391 done and the insns have been emitted. If it would take more than N
3392 insns, zero is returned and no insns and emitted. */
3395 rs6000_emit_set_const (rtx dest
, enum machine_mode mode
,
3396 rtx source
, int n ATTRIBUTE_UNUSED
)
3398 rtx result
, insn
, set
;
3399 HOST_WIDE_INT c0
, c1
;
3406 dest
= gen_reg_rtx (mode
);
3407 emit_insn (gen_rtx_SET (VOIDmode
, dest
, source
));
3411 result
= no_new_pseudos
? dest
: gen_reg_rtx (SImode
);
3413 emit_insn (gen_rtx_SET (VOIDmode
, result
,
3414 GEN_INT (INTVAL (source
)
3415 & (~ (HOST_WIDE_INT
) 0xffff))));
3416 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
3417 gen_rtx_IOR (SImode
, result
,
3418 GEN_INT (INTVAL (source
) & 0xffff))));
3423 switch (GET_CODE (source
))
3426 c0
= INTVAL (source
);
3431 #if HOST_BITS_PER_WIDE_INT >= 64
3432 c0
= CONST_DOUBLE_LOW (source
);
3435 c0
= CONST_DOUBLE_LOW (source
);
3436 c1
= CONST_DOUBLE_HIGH (source
);
3444 result
= rs6000_emit_set_long_const (dest
, c0
, c1
);
3451 insn
= get_last_insn ();
3452 set
= single_set (insn
);
3453 if (! CONSTANT_P (SET_SRC (set
)))
3454 set_unique_reg_note (insn
, REG_EQUAL
, source
);
3459 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3460 fall back to a straight forward decomposition. We do this to avoid
3461 exponential run times encountered when looking for longer sequences
3462 with rs6000_emit_set_const. */
3464 rs6000_emit_set_long_const (rtx dest
, HOST_WIDE_INT c1
, HOST_WIDE_INT c2
)
3466 if (!TARGET_POWERPC64
)
3468 rtx operand1
, operand2
;
3470 operand1
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
== 0,
3472 operand2
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
!= 0,
3474 emit_move_insn (operand1
, GEN_INT (c1
));
3475 emit_move_insn (operand2
, GEN_INT (c2
));
3479 HOST_WIDE_INT ud1
, ud2
, ud3
, ud4
;
3482 ud2
= (c1
& 0xffff0000) >> 16;
3483 #if HOST_BITS_PER_WIDE_INT >= 64
3487 ud4
= (c2
& 0xffff0000) >> 16;
3489 if ((ud4
== 0xffff && ud3
== 0xffff && ud2
== 0xffff && (ud1
& 0x8000))
3490 || (ud4
== 0 && ud3
== 0 && ud2
== 0 && ! (ud1
& 0x8000)))
3493 emit_move_insn (dest
, GEN_INT (((ud1
^ 0x8000) - 0x8000)));
3495 emit_move_insn (dest
, GEN_INT (ud1
));
3498 else if ((ud4
== 0xffff && ud3
== 0xffff && (ud2
& 0x8000))
3499 || (ud4
== 0 && ud3
== 0 && ! (ud2
& 0x8000)))
3502 emit_move_insn (dest
, GEN_INT (((ud2
<< 16) ^ 0x80000000)
3505 emit_move_insn (dest
, GEN_INT (ud2
<< 16));
3507 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
3509 else if ((ud4
== 0xffff && (ud3
& 0x8000))
3510 || (ud4
== 0 && ! (ud3
& 0x8000)))
3513 emit_move_insn (dest
, GEN_INT (((ud3
<< 16) ^ 0x80000000)
3516 emit_move_insn (dest
, GEN_INT (ud3
<< 16));
3519 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud2
)));
3520 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (16)));
3522 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
3527 emit_move_insn (dest
, GEN_INT (((ud4
<< 16) ^ 0x80000000)
3530 emit_move_insn (dest
, GEN_INT (ud4
<< 16));
3533 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud3
)));
3535 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (32)));
3537 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
,
3538 GEN_INT (ud2
<< 16)));
3540 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
3546 /* Helper for the following. Get rid of [r+r] memory refs
3547 in cases where it won't work (TImode, TFmode). */
3550 rs6000_eliminate_indexed_memrefs (rtx operands
[2])
3552 if (GET_CODE (operands
[0]) == MEM
3553 && GET_CODE (XEXP (operands
[0], 0)) != REG
3554 && ! legitimate_constant_pool_address_p (XEXP (operands
[0], 0))
3555 && ! reload_in_progress
)
3557 = replace_equiv_address (operands
[0],
3558 copy_addr_to_reg (XEXP (operands
[0], 0)));
3560 if (GET_CODE (operands
[1]) == MEM
3561 && GET_CODE (XEXP (operands
[1], 0)) != REG
3562 && ! legitimate_constant_pool_address_p (XEXP (operands
[1], 0))
3563 && ! reload_in_progress
)
3565 = replace_equiv_address (operands
[1],
3566 copy_addr_to_reg (XEXP (operands
[1], 0)));
3569 /* Emit a move from SOURCE to DEST in mode MODE. */
3571 rs6000_emit_move (rtx dest
, rtx source
, enum machine_mode mode
)
3575 operands
[1] = source
;
3577 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3578 if (GET_CODE (operands
[1]) == CONST_DOUBLE
3579 && ! FLOAT_MODE_P (mode
)
3580 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
3582 /* FIXME. This should never happen. */
3583 /* Since it seems that it does, do the safe thing and convert
3585 operands
[1] = gen_int_mode (CONST_DOUBLE_LOW (operands
[1]), mode
);
3587 gcc_assert (GET_CODE (operands
[1]) != CONST_DOUBLE
3588 || FLOAT_MODE_P (mode
)
3589 || ((CONST_DOUBLE_HIGH (operands
[1]) != 0
3590 || CONST_DOUBLE_LOW (operands
[1]) < 0)
3591 && (CONST_DOUBLE_HIGH (operands
[1]) != -1
3592 || CONST_DOUBLE_LOW (operands
[1]) >= 0)));
3594 /* Check if GCC is setting up a block move that will end up using FP
3595 registers as temporaries. We must make sure this is acceptable. */
3596 if (GET_CODE (operands
[0]) == MEM
3597 && GET_CODE (operands
[1]) == MEM
3599 && (SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[0]))
3600 || SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[1])))
3601 && ! (SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[0]) > 32
3602 ? 32 : MEM_ALIGN (operands
[0])))
3603 || SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[1]) > 32
3605 : MEM_ALIGN (operands
[1]))))
3606 && ! MEM_VOLATILE_P (operands
[0])
3607 && ! MEM_VOLATILE_P (operands
[1]))
3609 emit_move_insn (adjust_address (operands
[0], SImode
, 0),
3610 adjust_address (operands
[1], SImode
, 0));
3611 emit_move_insn (adjust_address (operands
[0], SImode
, 4),
3612 adjust_address (operands
[1], SImode
, 4));
3616 if (!no_new_pseudos
&& GET_CODE (operands
[0]) == MEM
3617 && !gpc_reg_operand (operands
[1], mode
))
3618 operands
[1] = force_reg (mode
, operands
[1]);
3620 if (mode
== SFmode
&& ! TARGET_POWERPC
3621 && TARGET_HARD_FLOAT
&& TARGET_FPRS
3622 && GET_CODE (operands
[0]) == MEM
)
3626 if (reload_in_progress
|| reload_completed
)
3627 regnum
= true_regnum (operands
[1]);
3628 else if (GET_CODE (operands
[1]) == REG
)
3629 regnum
= REGNO (operands
[1]);
3633 /* If operands[1] is a register, on POWER it may have
3634 double-precision data in it, so truncate it to single
3636 if (FP_REGNO_P (regnum
) || regnum
>= FIRST_PSEUDO_REGISTER
)
3639 newreg
= (no_new_pseudos
? operands
[1] : gen_reg_rtx (mode
));
3640 emit_insn (gen_aux_truncdfsf2 (newreg
, operands
[1]));
3641 operands
[1] = newreg
;
3645 /* Recognize the case where operand[1] is a reference to thread-local
3646 data and load its address to a register. */
3647 if (rs6000_tls_referenced_p (operands
[1]))
3649 enum tls_model model
;
3650 rtx tmp
= operands
[1];
3653 if (GET_CODE (tmp
) == CONST
&& GET_CODE (XEXP (tmp
, 0)) == PLUS
)
3655 addend
= XEXP (XEXP (tmp
, 0), 1);
3656 tmp
= XEXP (XEXP (tmp
, 0), 0);
3659 gcc_assert (GET_CODE (tmp
) == SYMBOL_REF
);
3660 model
= SYMBOL_REF_TLS_MODEL (tmp
);
3661 gcc_assert (model
!= 0);
3663 tmp
= rs6000_legitimize_tls_address (tmp
, model
);
3666 tmp
= gen_rtx_PLUS (mode
, tmp
, addend
);
3667 tmp
= force_operand (tmp
, operands
[0]);
3672 /* Handle the case where reload calls us with an invalid address. */
3673 if (reload_in_progress
&& mode
== Pmode
3674 && (! general_operand (operands
[1], mode
)
3675 || ! nonimmediate_operand (operands
[0], mode
)))
3678 /* 128-bit constant floating-point values on Darwin should really be
3679 loaded as two parts. */
3680 if ((DEFAULT_ABI
== ABI_AIX
|| DEFAULT_ABI
== ABI_DARWIN
)
3681 && TARGET_HARD_FLOAT
&& TARGET_FPRS
&& TARGET_LONG_DOUBLE_128
3682 && mode
== TFmode
&& GET_CODE (operands
[1]) == CONST_DOUBLE
)
3684 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3685 know how to get a DFmode SUBREG of a TFmode. */
3686 rs6000_emit_move (simplify_gen_subreg (DImode
, operands
[0], mode
, 0),
3687 simplify_gen_subreg (DImode
, operands
[1], mode
, 0),
3689 rs6000_emit_move (simplify_gen_subreg (DImode
, operands
[0], mode
,
3690 GET_MODE_SIZE (DImode
)),
3691 simplify_gen_subreg (DImode
, operands
[1], mode
,
3692 GET_MODE_SIZE (DImode
)),
3697 /* FIXME: In the long term, this switch statement should go away
3698 and be replaced by a sequence of tests based on things like
3704 if (CONSTANT_P (operands
[1])
3705 && GET_CODE (operands
[1]) != CONST_INT
)
3706 operands
[1] = force_const_mem (mode
, operands
[1]);
3710 rs6000_eliminate_indexed_memrefs (operands
);
3715 if (CONSTANT_P (operands
[1])
3716 && ! easy_fp_constant (operands
[1], mode
))
3717 operands
[1] = force_const_mem (mode
, operands
[1]);
3728 if (CONSTANT_P (operands
[1])
3729 && !easy_vector_constant (operands
[1], mode
))
3730 operands
[1] = force_const_mem (mode
, operands
[1]);
3735 /* Use default pattern for address of ELF small data */
3738 && DEFAULT_ABI
== ABI_V4
3739 && (GET_CODE (operands
[1]) == SYMBOL_REF
3740 || GET_CODE (operands
[1]) == CONST
)
3741 && small_data_operand (operands
[1], mode
))
3743 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
3747 if (DEFAULT_ABI
== ABI_V4
3748 && mode
== Pmode
&& mode
== SImode
3749 && flag_pic
== 1 && got_operand (operands
[1], mode
))
3751 emit_insn (gen_movsi_got (operands
[0], operands
[1]));
3755 if ((TARGET_ELF
|| DEFAULT_ABI
== ABI_DARWIN
)
3759 && CONSTANT_P (operands
[1])
3760 && GET_CODE (operands
[1]) != HIGH
3761 && GET_CODE (operands
[1]) != CONST_INT
)
3763 rtx target
= (no_new_pseudos
? operands
[0] : gen_reg_rtx (mode
));
3765 /* If this is a function address on -mcall-aixdesc,
3766 convert it to the address of the descriptor. */
3767 if (DEFAULT_ABI
== ABI_AIX
3768 && GET_CODE (operands
[1]) == SYMBOL_REF
3769 && XSTR (operands
[1], 0)[0] == '.')
3771 const char *name
= XSTR (operands
[1], 0);
3773 while (*name
== '.')
3775 new_ref
= gen_rtx_SYMBOL_REF (Pmode
, name
);
3776 CONSTANT_POOL_ADDRESS_P (new_ref
)
3777 = CONSTANT_POOL_ADDRESS_P (operands
[1]);
3778 SYMBOL_REF_FLAGS (new_ref
) = SYMBOL_REF_FLAGS (operands
[1]);
3779 SYMBOL_REF_USED (new_ref
) = SYMBOL_REF_USED (operands
[1]);
3780 SYMBOL_REF_DECL (new_ref
) = SYMBOL_REF_DECL (operands
[1]);
3781 operands
[1] = new_ref
;
3784 if (DEFAULT_ABI
== ABI_DARWIN
)
3787 if (MACHO_DYNAMIC_NO_PIC_P
)
3789 /* Take care of any required data indirection. */
3790 operands
[1] = rs6000_machopic_legitimize_pic_address (
3791 operands
[1], mode
, operands
[0]);
3792 if (operands
[0] != operands
[1])
3793 emit_insn (gen_rtx_SET (VOIDmode
,
3794 operands
[0], operands
[1]));
3798 emit_insn (gen_macho_high (target
, operands
[1]));
3799 emit_insn (gen_macho_low (operands
[0], target
, operands
[1]));
3803 emit_insn (gen_elf_high (target
, operands
[1]));
3804 emit_insn (gen_elf_low (operands
[0], target
, operands
[1]));
3808 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3809 and we have put it in the TOC, we just need to make a TOC-relative
3812 && GET_CODE (operands
[1]) == SYMBOL_REF
3813 && constant_pool_expr_p (operands
[1])
3814 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands
[1]),
3815 get_pool_mode (operands
[1])))
3817 operands
[1] = create_TOC_reference (operands
[1]);
3819 else if (mode
== Pmode
3820 && CONSTANT_P (operands
[1])
3821 && ((GET_CODE (operands
[1]) != CONST_INT
3822 && ! easy_fp_constant (operands
[1], mode
))
3823 || (GET_CODE (operands
[1]) == CONST_INT
3824 && num_insns_constant (operands
[1], mode
) > 2)
3825 || (GET_CODE (operands
[0]) == REG
3826 && FP_REGNO_P (REGNO (operands
[0]))))
3827 && GET_CODE (operands
[1]) != HIGH
3828 && ! legitimate_constant_pool_address_p (operands
[1])
3829 && ! toc_relative_expr_p (operands
[1]))
3831 /* Emit a USE operation so that the constant isn't deleted if
3832 expensive optimizations are turned on because nobody
3833 references it. This should only be done for operands that
3834 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3835 This should not be done for operands that contain LABEL_REFs.
3836 For now, we just handle the obvious case. */
3837 if (GET_CODE (operands
[1]) != LABEL_REF
)
3838 emit_insn (gen_rtx_USE (VOIDmode
, operands
[1]));
3841 /* Darwin uses a special PIC legitimizer. */
3842 if (DEFAULT_ABI
== ABI_DARWIN
&& MACHOPIC_INDIRECT
)
3845 rs6000_machopic_legitimize_pic_address (operands
[1], mode
,
3847 if (operands
[0] != operands
[1])
3848 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
3853 /* If we are to limit the number of things we put in the TOC and
3854 this is a symbol plus a constant we can add in one insn,
3855 just put the symbol in the TOC and add the constant. Don't do
3856 this if reload is in progress. */
3857 if (GET_CODE (operands
[1]) == CONST
3858 && TARGET_NO_SUM_IN_TOC
&& ! reload_in_progress
3859 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
3860 && add_operand (XEXP (XEXP (operands
[1], 0), 1), mode
)
3861 && (GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == LABEL_REF
3862 || GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
)
3863 && ! side_effects_p (operands
[0]))
3866 force_const_mem (mode
, XEXP (XEXP (operands
[1], 0), 0));
3867 rtx other
= XEXP (XEXP (operands
[1], 0), 1);
3869 sym
= force_reg (mode
, sym
);
3871 emit_insn (gen_addsi3 (operands
[0], sym
, other
));
3873 emit_insn (gen_adddi3 (operands
[0], sym
, other
));
3877 operands
[1] = force_const_mem (mode
, operands
[1]);
3880 && constant_pool_expr_p (XEXP (operands
[1], 0))
3881 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3882 get_pool_constant (XEXP (operands
[1], 0)),
3883 get_pool_mode (XEXP (operands
[1], 0))))
3886 = gen_const_mem (mode
,
3887 create_TOC_reference (XEXP (operands
[1], 0)));
3888 set_mem_alias_set (operands
[1], get_TOC_alias_set ());
3894 rs6000_eliminate_indexed_memrefs (operands
);
3898 emit_insn (gen_rtx_PARALLEL (VOIDmode
,
3900 gen_rtx_SET (VOIDmode
,
3901 operands
[0], operands
[1]),
3902 gen_rtx_CLOBBER (VOIDmode
,
3903 gen_rtx_SCRATCH (SImode
)))));
3912 /* Above, we may have called force_const_mem which may have returned
3913 an invalid address. If we can, fix this up; otherwise, reload will
3914 have to deal with it. */
3915 if (GET_CODE (operands
[1]) == MEM
&& ! reload_in_progress
)
3916 operands
[1] = validize_mem (operands
[1]);
3919 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
/* Nonzero if we can use a floating-point register to pass this arg.  */
#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE)		\
  (GET_MODE_CLASS (MODE) == MODE_FLOAT		\
   && (CUM)->fregno <= FP_ARG_MAX_REG		\
   && TARGET_HARD_FLOAT && TARGET_FPRS)

/* Nonzero if we can use an AltiVec register to pass this arg.  */
#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED)	\
  (ALTIVEC_VECTOR_MODE (MODE)				\
   && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG		\
   && TARGET_ALTIVEC_ABI				\
   && (NAMED))
3935 /* Return a nonzero value to say to return the function value in
3936 memory, just as large structures are always returned. TYPE will be
3937 the data type of the value, and FNTYPE will be the type of the
3938 function doing the returning, or @code{NULL} for libcalls.
3940 The AIX ABI for the RS/6000 specifies that all structures are
3941 returned in memory. The Darwin ABI does the same. The SVR4 ABI
3942 specifies that structures <= 8 bytes are returned in r3/r4, but a
3943 draft put them in memory, and GCC used to implement the draft
3944 instead of the final standard. Therefore, aix_struct_return
3945 controls this instead of DEFAULT_ABI; V.4 targets needing backward
3946 compatibility can change DRAFT_V4_STRUCT_RET to override the
3947 default, and -m switches get the final word. See
3948 rs6000_override_options for more details.
3950 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
3951 long double support is enabled. These values are returned in memory.
3953 int_size_in_bytes returns -1 for variable size objects, which go in
3954 memory always. The cast to unsigned makes -1 > 8. */
3957 rs6000_return_in_memory (tree type
, tree fntype ATTRIBUTE_UNUSED
)
3959 /* In the darwin64 abi, try to use registers for larger structs
3961 if (rs6000_darwin64_abi
3962 && TREE_CODE (type
) == RECORD_TYPE
3963 && int_size_in_bytes (type
) > 0)
3965 CUMULATIVE_ARGS valcum
;
3969 valcum
.fregno
= FP_ARG_MIN_REG
;
3970 valcum
.vregno
= ALTIVEC_ARG_MIN_REG
;
3971 /* Do a trial code generation as if this were going to be passed
3972 as an argument; if any part goes in memory, we return NULL. */
3973 valret
= rs6000_darwin64_record_arg (&valcum
, type
, 1, true);
3976 /* Otherwise fall through to more conventional ABI rules. */
3979 if (AGGREGATE_TYPE_P (type
)
3980 && (aix_struct_return
3981 || (unsigned HOST_WIDE_INT
) int_size_in_bytes (type
) > 8))
3984 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
3985 modes only exist for GCC vector types if -maltivec. */
3986 if (TARGET_32BIT
&& !TARGET_ALTIVEC_ABI
3987 && ALTIVEC_VECTOR_MODE (TYPE_MODE (type
)))
3990 /* Return synthetic vectors in memory. */
3991 if (TREE_CODE (type
) == VECTOR_TYPE
3992 && int_size_in_bytes (type
) > (TARGET_ALTIVEC_ABI
? 16 : 8))
3994 static bool warned_for_return_big_vectors
= false;
3995 if (!warned_for_return_big_vectors
)
3997 warning (0, "GCC vector returned by reference: "
3998 "non-standard ABI extension with no compatibility guarantee");
3999 warned_for_return_big_vectors
= true;
4004 if (DEFAULT_ABI
== ABI_V4
&& TYPE_MODE (type
) == TFmode
)
4010 /* Initialize a variable CUM of type CUMULATIVE_ARGS
4011 for a call to a function whose data type is FNTYPE.
4012 For a library call, FNTYPE is 0.
4014 For incoming args we set the number of arguments in the prototype large
4015 so we never return a PARALLEL. */
4018 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
,
4019 rtx libname ATTRIBUTE_UNUSED
, int incoming
,
4020 int libcall
, int n_named_args
)
4022 static CUMULATIVE_ARGS zero_cumulative
;
4024 *cum
= zero_cumulative
;
4026 cum
->fregno
= FP_ARG_MIN_REG
;
4027 cum
->vregno
= ALTIVEC_ARG_MIN_REG
;
4028 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
4029 cum
->call_cookie
= ((DEFAULT_ABI
== ABI_V4
&& libcall
)
4030 ? CALL_LIBCALL
: CALL_NORMAL
);
4031 cum
->sysv_gregno
= GP_ARG_MIN_REG
;
4032 cum
->stdarg
= fntype
4033 && (TYPE_ARG_TYPES (fntype
) != 0
4034 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
4035 != void_type_node
));
4037 cum
->nargs_prototype
= 0;
4038 if (incoming
|| cum
->prototype
)
4039 cum
->nargs_prototype
= n_named_args
;
4041 /* Check for a longcall attribute. */
4042 if ((!fntype
&& rs6000_default_long_calls
)
4044 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
))
4045 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
))))
4046 cum
->call_cookie
|= CALL_LONG
;
4048 if (TARGET_DEBUG_ARG
)
4050 fprintf (stderr
, "\ninit_cumulative_args:");
4053 tree ret_type
= TREE_TYPE (fntype
);
4054 fprintf (stderr
, " ret code = %s,",
4055 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
4058 if (cum
->call_cookie
& CALL_LONG
)
4059 fprintf (stderr
, " longcall,");
4061 fprintf (stderr
, " proto = %d, nargs = %d\n",
4062 cum
->prototype
, cum
->nargs_prototype
);
4067 && TARGET_ALTIVEC_ABI
4068 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype
))))
4070 error ("cannot return value in vector register because"
4071 " altivec instructions are disabled, use -maltivec"
4076 /* Return true if TYPE must be passed on the stack and not in registers. */
4079 rs6000_must_pass_in_stack (enum machine_mode mode
, tree type
)
4081 if (DEFAULT_ABI
== ABI_AIX
|| TARGET_64BIT
)
4082 return must_pass_in_stack_var_size (mode
, type
);
4084 return must_pass_in_stack_var_size_or_pad (mode
, type
);
4087 /* If defined, a C expression which determines whether, and in which
4088 direction, to pad out an argument with extra space. The value
4089 should be of type `enum direction': either `upward' to pad above
4090 the argument, `downward' to pad below, or `none' to inhibit
4093 For the AIX ABI structs are always stored left shifted in their
4097 function_arg_padding (enum machine_mode mode
, tree type
)
4099 #ifndef AGGREGATE_PADDING_FIXED
4100 #define AGGREGATE_PADDING_FIXED 0
4102 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4103 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
4106 if (!AGGREGATE_PADDING_FIXED
)
4108 /* GCC used to pass structures of the same size as integer types as
4109 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4110 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4111 passed padded downward, except that -mstrict-align further
4112 muddied the water in that multi-component structures of 2 and 4
4113 bytes in size were passed padded upward.
4115 The following arranges for best compatibility with previous
4116 versions of gcc, but removes the -mstrict-align dependency. */
4117 if (BYTES_BIG_ENDIAN
)
4119 HOST_WIDE_INT size
= 0;
4121 if (mode
== BLKmode
)
4123 if (type
&& TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
)
4124 size
= int_size_in_bytes (type
);
4127 size
= GET_MODE_SIZE (mode
);
4129 if (size
== 1 || size
== 2 || size
== 4)
4135 if (AGGREGATES_PAD_UPWARD_ALWAYS
)
4137 if (type
!= 0 && AGGREGATE_TYPE_P (type
))
4141 /* Fall back to the default. */
4142 return DEFAULT_FUNCTION_ARG_PADDING (mode
, type
);
4145 /* If defined, a C expression that gives the alignment boundary, in bits,
4146 of an argument with the specified mode and type. If it is not defined,
4147 PARM_BOUNDARY is used for all arguments.
4149 V.4 wants long longs to be double word aligned.
4150 Doubleword align SPE vectors.
4151 Quadword align Altivec vectors.
4152 Quadword align large synthetic vector types. */
4155 function_arg_boundary (enum machine_mode mode
, tree type
)
4157 if (DEFAULT_ABI
== ABI_V4
&& GET_MODE_SIZE (mode
) == 8)
4159 else if (SPE_VECTOR_MODE (mode
)
4160 || (type
&& TREE_CODE (type
) == VECTOR_TYPE
4161 && int_size_in_bytes (type
) >= 8
4162 && int_size_in_bytes (type
) < 16))
4164 else if (ALTIVEC_VECTOR_MODE (mode
)
4165 || (type
&& TREE_CODE (type
) == VECTOR_TYPE
4166 && int_size_in_bytes (type
) >= 16))
4168 else if (rs6000_darwin64_abi
&& mode
== BLKmode
4169 && type
&& TYPE_ALIGN (type
) > 64)
4172 return PARM_BOUNDARY
;
4175 /* For a function parm of MODE and TYPE, return the starting word in
4176 the parameter area. NWORDS of the parameter area are already used. */
4179 rs6000_parm_start (enum machine_mode mode
, tree type
, unsigned int nwords
)
4182 unsigned int parm_offset
;
4184 align
= function_arg_boundary (mode
, type
) / PARM_BOUNDARY
- 1;
4185 parm_offset
= DEFAULT_ABI
== ABI_V4
? 2 : 6;
4186 return nwords
+ (-(parm_offset
+ nwords
) & align
);
4189 /* Compute the size (in words) of a function argument. */
4191 static unsigned long
4192 rs6000_arg_size (enum machine_mode mode
, tree type
)
4196 if (mode
!= BLKmode
)
4197 size
= GET_MODE_SIZE (mode
);
4199 size
= int_size_in_bytes (type
);
4202 return (size
+ 3) >> 2;
4204 return (size
+ 7) >> 3;
4207 /* Use this to flush pending int fields. */
4210 rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS
*cum
,
4211 HOST_WIDE_INT bitpos
)
4213 unsigned int startbit
, endbit
;
4214 int intregs
, intoffset
;
4215 enum machine_mode mode
;
4217 if (cum
->intoffset
== -1)
4220 intoffset
= cum
->intoffset
;
4221 cum
->intoffset
= -1;
4223 if (intoffset
% BITS_PER_WORD
!= 0)
4225 mode
= mode_for_size (BITS_PER_WORD
- intoffset
% BITS_PER_WORD
,
4227 if (mode
== BLKmode
)
4229 /* We couldn't find an appropriate mode, which happens,
4230 e.g., in packed structs when there are 3 bytes to load.
4231 Back intoffset back to the beginning of the word in this
4233 intoffset
= intoffset
& -BITS_PER_WORD
;
4237 startbit
= intoffset
& -BITS_PER_WORD
;
4238 endbit
= (bitpos
+ BITS_PER_WORD
- 1) & -BITS_PER_WORD
;
4239 intregs
= (endbit
- startbit
) / BITS_PER_WORD
;
4240 cum
->words
+= intregs
;
4243 /* The darwin64 ABI calls for us to recurse down through structs,
4244 looking for elements passed in registers. Unfortunately, we have
4245 to track int register count here also because of misalignments
4246 in powerpc alignment mode. */
4249 rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS
*cum
,
4251 HOST_WIDE_INT startbitpos
)
4255 for (f
= TYPE_FIELDS (type
); f
; f
= TREE_CHAIN (f
))
4256 if (TREE_CODE (f
) == FIELD_DECL
)
4258 HOST_WIDE_INT bitpos
= startbitpos
;
4259 tree ftype
= TREE_TYPE (f
);
4260 enum machine_mode mode
= TYPE_MODE (ftype
);
4262 if (DECL_SIZE (f
) != 0
4263 && host_integerp (bit_position (f
), 1))
4264 bitpos
+= int_bit_position (f
);
4266 /* ??? FIXME: else assume zero offset. */
4268 if (TREE_CODE (ftype
) == RECORD_TYPE
)
4269 rs6000_darwin64_record_arg_advance_recurse (cum
, ftype
, bitpos
);
4270 else if (USE_FP_FOR_ARG_P (cum
, mode
, ftype
))
4272 rs6000_darwin64_record_arg_advance_flush (cum
, bitpos
);
4273 cum
->fregno
+= (GET_MODE_SIZE (mode
) + 7) >> 3;
4274 cum
->words
+= (GET_MODE_SIZE (mode
) + 7) >> 3;
4276 else if (USE_ALTIVEC_FOR_ARG_P (cum
, mode
, type
, 1))
4278 rs6000_darwin64_record_arg_advance_flush (cum
, bitpos
);
4282 else if (cum
->intoffset
== -1)
4283 cum
->intoffset
= bitpos
;
4287 /* Update the data in CUM to advance over an argument
4288 of mode MODE and data type TYPE.
4289 (TYPE is null for libcalls where that information may not be available.)
4291 Note that for args passed by reference, function_arg will be called
4292 with MODE and TYPE set to that of the pointer to the arg, not the arg
4296 function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
4297 tree type
, int named
, int depth
)
4301 /* Only tick off an argument if we're not recursing. */
4303 cum
->nargs_prototype
--;
4305 if (TARGET_ALTIVEC_ABI
4306 && (ALTIVEC_VECTOR_MODE (mode
)
4307 || (type
&& TREE_CODE (type
) == VECTOR_TYPE
4308 && int_size_in_bytes (type
) == 16)))
4312 if (USE_ALTIVEC_FOR_ARG_P (cum
, mode
, type
, named
))
4315 if (!TARGET_ALTIVEC
)
4316 error ("cannot pass argument in vector register because"
4317 " altivec instructions are disabled, use -maltivec"
4320 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
4321 even if it is going to be passed in a vector register.
4322 Darwin does the same for variable-argument functions. */
4323 if ((DEFAULT_ABI
== ABI_AIX
&& TARGET_64BIT
)
4324 || (cum
->stdarg
&& DEFAULT_ABI
!= ABI_V4
))
4334 /* Vector parameters must be 16-byte aligned. This places
4335 them at 2 mod 4 in terms of words in 32-bit mode, since
4336 the parameter save area starts at offset 24 from the
4337 stack. In 64-bit mode, they just have to start on an
4338 even word, since the parameter save area is 16-byte
4339 aligned. Space for GPRs is reserved even if the argument
4340 will be passed in memory. */
4342 align
= (2 - cum
->words
) & 3;
4344 align
= cum
->words
& 1;
4345 cum
->words
+= align
+ rs6000_arg_size (mode
, type
);
4347 if (TARGET_DEBUG_ARG
)
4349 fprintf (stderr
, "function_adv: words = %2d, align=%d, ",
4351 fprintf (stderr
, "nargs = %4d, proto = %d, mode = %4s\n",
4352 cum
->nargs_prototype
, cum
->prototype
,
4353 GET_MODE_NAME (mode
));
4357 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
)
4359 && cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
4362 else if (rs6000_darwin64_abi
4364 && TREE_CODE (type
) == RECORD_TYPE
4365 && (size
= int_size_in_bytes (type
)) > 0)
4367 /* Variable sized types have size == -1 and are
4368 treated as if consisting entirely of ints.
4369 Pad to 16 byte boundary if needed. */
4370 if (TYPE_ALIGN (type
) >= 2 * BITS_PER_WORD
4371 && (cum
->words
% 2) != 0)
4373 /* For varargs, we can just go up by the size of the struct. */
4375 cum
->words
+= (size
+ 7) / 8;
4378 /* It is tempting to say int register count just goes up by
4379 sizeof(type)/8, but this is wrong in a case such as
4380 { int; double; int; } [powerpc alignment]. We have to
4381 grovel through the fields for these too. */
4383 rs6000_darwin64_record_arg_advance_recurse (cum
, type
, 0);
4384 rs6000_darwin64_record_arg_advance_flush (cum
,
4385 size
* BITS_PER_UNIT
);
4388 else if (DEFAULT_ABI
== ABI_V4
)
4390 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
4391 && (mode
== SFmode
|| mode
== DFmode
))
4393 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
4398 cum
->words
+= cum
->words
& 1;
4399 cum
->words
+= rs6000_arg_size (mode
, type
);
4404 int n_words
= rs6000_arg_size (mode
, type
);
4405 int gregno
= cum
->sysv_gregno
;
4407 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4408 (r7,r8) or (r9,r10). As does any other 2 word item such
4409 as complex int due to a historical mistake. */
4411 gregno
+= (1 - gregno
) & 1;
4413 /* Multi-reg args are not split between registers and stack. */
4414 if (gregno
+ n_words
- 1 > GP_ARG_MAX_REG
)
4416 /* Long long and SPE vectors are aligned on the stack.
4417 So are other 2 word items such as complex int due to
4418 a historical mistake. */
4420 cum
->words
+= cum
->words
& 1;
4421 cum
->words
+= n_words
;
4424 /* Note: continuing to accumulate gregno past when we've started
4425 spilling to the stack indicates the fact that we've started
4426 spilling to the stack to expand_builtin_saveregs. */
4427 cum
->sysv_gregno
= gregno
+ n_words
;
4430 if (TARGET_DEBUG_ARG
)
4432 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
4433 cum
->words
, cum
->fregno
);
4434 fprintf (stderr
, "gregno = %2d, nargs = %4d, proto = %d, ",
4435 cum
->sysv_gregno
, cum
->nargs_prototype
, cum
->prototype
);
4436 fprintf (stderr
, "mode = %4s, named = %d\n",
4437 GET_MODE_NAME (mode
), named
);
4442 int n_words
= rs6000_arg_size (mode
, type
);
4443 int start_words
= cum
->words
;
4444 int align_words
= rs6000_parm_start (mode
, type
, start_words
);
4446 cum
->words
= align_words
+ n_words
;
4448 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
4449 && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
4450 cum
->fregno
+= (GET_MODE_SIZE (mode
) + 7) >> 3;
4452 if (TARGET_DEBUG_ARG
)
4454 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
4455 cum
->words
, cum
->fregno
);
4456 fprintf (stderr
, "nargs = %4d, proto = %d, mode = %4s, ",
4457 cum
->nargs_prototype
, cum
->prototype
, GET_MODE_NAME (mode
));
4458 fprintf (stderr
, "named = %d, align = %d, depth = %d\n",
4459 named
, align_words
- start_words
, depth
);
4465 spe_build_register_parallel (enum machine_mode mode
, int gregno
)
4472 r1
= gen_rtx_REG (DImode
, gregno
);
4473 r1
= gen_rtx_EXPR_LIST (VOIDmode
, r1
, const0_rtx
);
4474 return gen_rtx_PARALLEL (mode
, gen_rtvec (1, r1
));
4477 r1
= gen_rtx_REG (DImode
, gregno
);
4478 r1
= gen_rtx_EXPR_LIST (VOIDmode
, r1
, const0_rtx
);
4479 r3
= gen_rtx_REG (DImode
, gregno
+ 2);
4480 r3
= gen_rtx_EXPR_LIST (VOIDmode
, r3
, GEN_INT (8));
4481 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r3
));
4488 /* Determine where to put a SIMD argument on the SPE. */
4490 rs6000_spe_function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
4493 int gregno
= cum
->sysv_gregno
;
4495 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
4496 are passed and returned in a pair of GPRs for ABI compatibility. */
4497 if (TARGET_E500_DOUBLE
&& (mode
== DFmode
|| mode
== DCmode
))
4499 int n_words
= rs6000_arg_size (mode
, type
);
4501 /* Doubles go in an odd/even register pair (r5/r6, etc). */
4503 gregno
+= (1 - gregno
) & 1;
4505 /* Multi-reg args are not split between registers and stack. */
4506 if (gregno
+ n_words
- 1 > GP_ARG_MAX_REG
)
4509 return spe_build_register_parallel (mode
, gregno
);
4513 int n_words
= rs6000_arg_size (mode
, type
);
4515 /* SPE vectors are put in odd registers. */
4516 if (n_words
== 2 && (gregno
& 1) == 0)
4519 if (gregno
+ n_words
- 1 <= GP_ARG_MAX_REG
)
4522 enum machine_mode m
= SImode
;
4524 r1
= gen_rtx_REG (m
, gregno
);
4525 r1
= gen_rtx_EXPR_LIST (m
, r1
, const0_rtx
);
4526 r2
= gen_rtx_REG (m
, gregno
+ 1);
4527 r2
= gen_rtx_EXPR_LIST (m
, r2
, GEN_INT (4));
4528 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r2
));
4535 if (gregno
<= GP_ARG_MAX_REG
)
4536 return gen_rtx_REG (mode
, gregno
);
4542 /* A subroutine of rs6000_darwin64_record_arg. Assign the bits of the
4543 structure between cum->intoffset and bitpos to integer registers. */
4546 rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS
*cum
,
4547 HOST_WIDE_INT bitpos
, rtx rvec
[], int *k
)
4549 enum machine_mode mode
;
4551 unsigned int startbit
, endbit
;
4552 int this_regno
, intregs
, intoffset
;
4555 if (cum
->intoffset
== -1)
4558 intoffset
= cum
->intoffset
;
4559 cum
->intoffset
= -1;
4561 /* If this is the trailing part of a word, try to only load that
4562 much into the register. Otherwise load the whole register. Note
4563 that in the latter case we may pick up unwanted bits. It's not a
4564 problem at the moment but may wish to revisit. */
4566 if (intoffset
% BITS_PER_WORD
!= 0)
4568 mode
= mode_for_size (BITS_PER_WORD
- intoffset
% BITS_PER_WORD
,
4570 if (mode
== BLKmode
)
4572 /* We couldn't find an appropriate mode, which happens,
4573 e.g., in packed structs when there are 3 bytes to load.
4574 Back intoffset back to the beginning of the word in this
4576 intoffset
= intoffset
& -BITS_PER_WORD
;
4583 startbit
= intoffset
& -BITS_PER_WORD
;
4584 endbit
= (bitpos
+ BITS_PER_WORD
- 1) & -BITS_PER_WORD
;
4585 intregs
= (endbit
- startbit
) / BITS_PER_WORD
;
4586 this_regno
= cum
->words
+ intoffset
/ BITS_PER_WORD
;
4588 if (intregs
> 0 && intregs
> GP_ARG_NUM_REG
- this_regno
)
4591 intregs
= MIN (intregs
, GP_ARG_NUM_REG
- this_regno
);
4595 intoffset
/= BITS_PER_UNIT
;
4598 regno
= GP_ARG_MIN_REG
+ this_regno
;
4599 reg
= gen_rtx_REG (mode
, regno
);
4601 gen_rtx_EXPR_LIST (VOIDmode
, reg
, GEN_INT (intoffset
));
4604 intoffset
= (intoffset
| (UNITS_PER_WORD
-1)) + 1;
4608 while (intregs
> 0);
4611 /* Recursive workhorse for the following. */
4614 rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS
*cum
, tree type
,
4615 HOST_WIDE_INT startbitpos
, rtx rvec
[],
4620 for (f
= TYPE_FIELDS (type
); f
; f
= TREE_CHAIN (f
))
4621 if (TREE_CODE (f
) == FIELD_DECL
)
4623 HOST_WIDE_INT bitpos
= startbitpos
;
4624 tree ftype
= TREE_TYPE (f
);
4625 enum machine_mode mode
= TYPE_MODE (ftype
);
4627 if (DECL_SIZE (f
) != 0
4628 && host_integerp (bit_position (f
), 1))
4629 bitpos
+= int_bit_position (f
);
4631 /* ??? FIXME: else assume zero offset. */
4633 if (TREE_CODE (ftype
) == RECORD_TYPE
)
4634 rs6000_darwin64_record_arg_recurse (cum
, ftype
, bitpos
, rvec
, k
);
4635 else if (cum
->named
&& USE_FP_FOR_ARG_P (cum
, mode
, ftype
))
4640 case SCmode
: mode
= SFmode
; break;
4641 case DCmode
: mode
= DFmode
; break;
4642 case TCmode
: mode
= TFmode
; break;
4646 rs6000_darwin64_record_arg_flush (cum
, bitpos
, rvec
, k
);
4648 = gen_rtx_EXPR_LIST (VOIDmode
,
4649 gen_rtx_REG (mode
, cum
->fregno
++),
4650 GEN_INT (bitpos
/ BITS_PER_UNIT
));
4654 else if (cum
->named
&& USE_ALTIVEC_FOR_ARG_P (cum
, mode
, ftype
, 1))
4656 rs6000_darwin64_record_arg_flush (cum
, bitpos
, rvec
, k
);
4658 = gen_rtx_EXPR_LIST (VOIDmode
,
4659 gen_rtx_REG (mode
, cum
->vregno
++),
4660 GEN_INT (bitpos
/ BITS_PER_UNIT
));
4662 else if (cum
->intoffset
== -1)
4663 cum
->intoffset
= bitpos
;
4667 /* For the darwin64 ABI, we want to construct a PARALLEL consisting of
4668 the register(s) to be used for each field and subfield of a struct
4669 being passed by value, along with the offset of where the
4670 register's value may be found in the block. FP fields go in FP
4671 register, vector fields go in vector registers, and everything
4672 else goes in int registers, packed as in memory.
4674 This code is also used for function return values. RETVAL indicates
4675 whether this is the case.
4677 Much of this is taken from the Sparc V9 port, which has a similar
4678 calling convention. */
4681 rs6000_darwin64_record_arg (CUMULATIVE_ARGS
*orig_cum
, tree type
,
4682 int named
, bool retval
)
4684 rtx rvec
[FIRST_PSEUDO_REGISTER
];
4685 int k
= 1, kbase
= 1;
4686 HOST_WIDE_INT typesize
= int_size_in_bytes (type
);
4687 /* This is a copy; modifications are not visible to our caller. */
4688 CUMULATIVE_ARGS copy_cum
= *orig_cum
;
4689 CUMULATIVE_ARGS
*cum
= ©_cum
;
4691 /* Pad to 16 byte boundary if needed. */
4692 if (!retval
&& TYPE_ALIGN (type
) >= 2 * BITS_PER_WORD
4693 && (cum
->words
% 2) != 0)
4700 /* Put entries into rvec[] for individual FP and vector fields, and
4701 for the chunks of memory that go in int regs. Note we start at
4702 element 1; 0 is reserved for an indication of using memory, and
4703 may or may not be filled in below. */
4704 rs6000_darwin64_record_arg_recurse (cum
, type
, 0, rvec
, &k
);
4705 rs6000_darwin64_record_arg_flush (cum
, typesize
* BITS_PER_UNIT
, rvec
, &k
);
4707 /* If any part of the struct went on the stack put all of it there.
4708 This hack is because the generic code for
4709 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
4710 parts of the struct are not at the beginning. */
4714 return NULL_RTX
; /* doesn't go in registers at all */
4716 rvec
[0] = gen_rtx_EXPR_LIST (VOIDmode
, NULL_RTX
, const0_rtx
);
4718 if (k
> 1 || cum
->use_stack
)
4719 return gen_rtx_PARALLEL (BLKmode
, gen_rtvec_v (k
- kbase
, &rvec
[kbase
]));
4724 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4727 rs6000_mixed_function_arg (enum machine_mode mode
, tree type
, int align_words
)
4731 rtx rvec
[GP_ARG_NUM_REG
+ 1];
4733 if (align_words
>= GP_ARG_NUM_REG
)
4736 n_units
= rs6000_arg_size (mode
, type
);
4738 /* Optimize the simple case where the arg fits in one gpr, except in
4739 the case of BLKmode due to assign_parms assuming that registers are
4740 BITS_PER_WORD wide. */
4742 || (n_units
== 1 && mode
!= BLKmode
))
4743 return gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
4746 if (align_words
+ n_units
> GP_ARG_NUM_REG
)
4747 /* Not all of the arg fits in gprs. Say that it goes in memory too,
4748 using a magic NULL_RTX component.
4749 FIXME: This is not strictly correct. Only some of the arg
4750 belongs in memory, not all of it. However, there isn't any way
4751 to do this currently, apart from building rtx descriptions for
4752 the pieces of memory we want stored. Due to bugs in the generic
4753 code we can't use the normal function_arg_partial_nregs scheme
4754 with the PARALLEL arg description we emit here.
4755 In any case, the code to store the whole arg to memory is often
4756 more efficient than code to store pieces, and we know that space
4757 is available in the right place for the whole arg. */
4758 /* FIXME: This should be fixed since the conversion to
4759 TARGET_ARG_PARTIAL_BYTES. */
4760 rvec
[k
++] = gen_rtx_EXPR_LIST (VOIDmode
, NULL_RTX
, const0_rtx
);
4765 rtx r
= gen_rtx_REG (SImode
, GP_ARG_MIN_REG
+ align_words
);
4766 rtx off
= GEN_INT (i
++ * 4);
4767 rvec
[k
++] = gen_rtx_EXPR_LIST (VOIDmode
, r
, off
);
4769 while (++align_words
< GP_ARG_NUM_REG
&& --n_units
!= 0);
4771 return gen_rtx_PARALLEL (mode
, gen_rtvec_v (k
, rvec
));
4774 /* Determine where to put an argument to a function.
4775 Value is zero to push the argument on the stack,
4776 or a hard register in which to store the argument.
4778 MODE is the argument's machine mode.
4779 TYPE is the data type of the argument (as a tree).
4780 This is null for libcalls where that information may
4782 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4783 the preceding args and about the function being called. It is
4784 not modified in this routine.
4785 NAMED is nonzero if this argument is a named parameter
4786 (otherwise it is an extra parameter matching an ellipsis).
4788 On RS/6000 the first eight words of non-FP are normally in registers
4789 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4790 Under V.4, the first 8 FP args are in registers.
4792 If this is floating-point and no prototype is specified, we use
4793 both an FP and integer register (or possibly FP reg and stack). Library
4794 functions (when CALL_LIBCALL is set) always have the proper types for args,
4795 so we can pass the FP value just in one register. emit_library_function
4796 doesn't support PARALLEL anyway.
4798 Note that for args passed by reference, function_arg will be called
4799 with MODE and TYPE set to that of the pointer to the arg, not the arg
4803 function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
4804 tree type
, int named
)
4806 enum rs6000_abi abi
= DEFAULT_ABI
;
4808 /* Return a marker to indicate whether CR1 needs to set or clear the
4809 bit that V.4 uses to say fp args were passed in registers.
4810 Assume that we don't need the marker for software floating point,
4811 or compiler generated library calls. */
4812 if (mode
== VOIDmode
)
4815 && cum
->nargs_prototype
< 0
4816 && (cum
->call_cookie
& CALL_LIBCALL
) == 0
4817 && (cum
->prototype
|| TARGET_NO_PROTOTYPE
))
4819 /* For the SPE, we need to crxor CR6 always. */
4821 return GEN_INT (cum
->call_cookie
| CALL_V4_SET_FP_ARGS
);
4822 else if (TARGET_HARD_FLOAT
&& TARGET_FPRS
)
4823 return GEN_INT (cum
->call_cookie
4824 | ((cum
->fregno
== FP_ARG_MIN_REG
)
4825 ? CALL_V4_SET_FP_ARGS
4826 : CALL_V4_CLEAR_FP_ARGS
));
4829 return GEN_INT (cum
->call_cookie
);
4832 if (rs6000_darwin64_abi
&& mode
== BLKmode
4833 && TREE_CODE (type
) == RECORD_TYPE
)
4835 rtx rslt
= rs6000_darwin64_record_arg (cum
, type
, named
, false);
4836 if (rslt
!= NULL_RTX
)
4838 /* Else fall through to usual handling. */
4841 if (USE_ALTIVEC_FOR_ARG_P (cum
, mode
, type
, named
))
4842 if (TARGET_64BIT
&& ! cum
->prototype
)
4844 /* Vector parameters get passed in vector register
4845 and also in GPRs or memory, in absence of prototype. */
4848 align_words
= (cum
->words
+ 1) & ~1;
4850 if (align_words
>= GP_ARG_NUM_REG
)
4856 slot
= gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
4858 return gen_rtx_PARALLEL (mode
,
4860 gen_rtx_EXPR_LIST (VOIDmode
,
4862 gen_rtx_EXPR_LIST (VOIDmode
,
4863 gen_rtx_REG (mode
, cum
->vregno
),
4867 return gen_rtx_REG (mode
, cum
->vregno
);
4868 else if (TARGET_ALTIVEC_ABI
4869 && (ALTIVEC_VECTOR_MODE (mode
)
4870 || (type
&& TREE_CODE (type
) == VECTOR_TYPE
4871 && int_size_in_bytes (type
) == 16)))
4873 if (named
|| abi
== ABI_V4
)
4877 /* Vector parameters to varargs functions under AIX or Darwin
4878 get passed in memory and possibly also in GPRs. */
4879 int align
, align_words
, n_words
;
4880 enum machine_mode part_mode
;
4882 /* Vector parameters must be 16-byte aligned. This places them at
4883 2 mod 4 in terms of words in 32-bit mode, since the parameter
4884 save area starts at offset 24 from the stack. In 64-bit mode,
4885 they just have to start on an even word, since the parameter
4886 save area is 16-byte aligned. */
4888 align
= (2 - cum
->words
) & 3;
4890 align
= cum
->words
& 1;
4891 align_words
= cum
->words
+ align
;
4893 /* Out of registers? Memory, then. */
4894 if (align_words
>= GP_ARG_NUM_REG
)
4897 if (TARGET_32BIT
&& TARGET_POWERPC64
)
4898 return rs6000_mixed_function_arg (mode
, type
, align_words
);
4900 /* The vector value goes in GPRs. Only the part of the
4901 value in GPRs is reported here. */
4903 n_words
= rs6000_arg_size (mode
, type
);
4904 if (align_words
+ n_words
> GP_ARG_NUM_REG
)
4905 /* Fortunately, there are only two possibilities, the value
4906 is either wholly in GPRs or half in GPRs and half not. */
4909 return gen_rtx_REG (part_mode
, GP_ARG_MIN_REG
+ align_words
);
4912 else if (TARGET_SPE_ABI
&& TARGET_SPE
4913 && (SPE_VECTOR_MODE (mode
)
4914 || (TARGET_E500_DOUBLE
&& (mode
== DFmode
4915 || mode
== DCmode
))))
4916 return rs6000_spe_function_arg (cum
, mode
, type
);
4918 else if (abi
== ABI_V4
)
4920 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
4921 && (mode
== SFmode
|| mode
== DFmode
))
4923 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
4924 return gen_rtx_REG (mode
, cum
->fregno
);
4930 int n_words
= rs6000_arg_size (mode
, type
);
4931 int gregno
= cum
->sysv_gregno
;
4933 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4934 (r7,r8) or (r9,r10). As does any other 2 word item such
4935 as complex int due to a historical mistake. */
4937 gregno
+= (1 - gregno
) & 1;
4939 /* Multi-reg args are not split between registers and stack. */
4940 if (gregno
+ n_words
- 1 > GP_ARG_MAX_REG
)
4943 if (TARGET_32BIT
&& TARGET_POWERPC64
)
4944 return rs6000_mixed_function_arg (mode
, type
,
4945 gregno
- GP_ARG_MIN_REG
);
4946 return gen_rtx_REG (mode
, gregno
);
4951 int align_words
= rs6000_parm_start (mode
, type
, cum
->words
);
4953 if (USE_FP_FOR_ARG_P (cum
, mode
, type
))
4955 rtx rvec
[GP_ARG_NUM_REG
+ 1];
4959 enum machine_mode fmode
= mode
;
4960 unsigned long n_fpreg
= (GET_MODE_SIZE (mode
) + 7) >> 3;
4962 if (cum
->fregno
+ n_fpreg
> FP_ARG_MAX_REG
+ 1)
4964 /* Currently, we only ever need one reg here because complex
4965 doubles are split. */
4966 gcc_assert (cum
->fregno
== FP_ARG_MAX_REG
&& fmode
== TFmode
);
4968 /* Long double split over regs and memory. */
4972 /* Do we also need to pass this arg in the parameter save
4975 && (cum
->nargs_prototype
<= 0
4976 || (DEFAULT_ABI
== ABI_AIX
4978 && align_words
>= GP_ARG_NUM_REG
)));
4980 if (!needs_psave
&& mode
== fmode
)
4981 return gen_rtx_REG (fmode
, cum
->fregno
);
4986 /* Describe the part that goes in gprs or the stack.
4987 This piece must come first, before the fprs. */
4988 if (align_words
< GP_ARG_NUM_REG
)
4990 unsigned long n_words
= rs6000_arg_size (mode
, type
);
4992 if (align_words
+ n_words
> GP_ARG_NUM_REG
4993 || (TARGET_32BIT
&& TARGET_POWERPC64
))
4995 /* If this is partially on the stack, then we only
4996 include the portion actually in registers here. */
4997 enum machine_mode rmode
= TARGET_32BIT
? SImode
: DImode
;
5000 if (align_words
+ n_words
> GP_ARG_NUM_REG
5001 && (TARGET_32BIT
&& TARGET_POWERPC64
))
5002 /* Not all of the arg fits in gprs. Say that it
5003 goes in memory too, using a magic NULL_RTX
5004 component. Also see comment in
5005 rs6000_mixed_function_arg for why the normal
5006 function_arg_partial_nregs scheme doesn't work
5008 rvec
[k
++] = gen_rtx_EXPR_LIST (VOIDmode
, NULL_RTX
,
5012 r
= gen_rtx_REG (rmode
,
5013 GP_ARG_MIN_REG
+ align_words
);
5014 off
= GEN_INT (i
++ * GET_MODE_SIZE (rmode
));
5015 rvec
[k
++] = gen_rtx_EXPR_LIST (VOIDmode
, r
, off
);
5017 while (++align_words
< GP_ARG_NUM_REG
&& --n_words
!= 0);
5021 /* The whole arg fits in gprs. */
5022 r
= gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
5023 rvec
[k
++] = gen_rtx_EXPR_LIST (VOIDmode
, r
, const0_rtx
);
5027 /* It's entirely in memory. */
5028 rvec
[k
++] = gen_rtx_EXPR_LIST (VOIDmode
, NULL_RTX
, const0_rtx
);
5031 /* Describe where this piece goes in the fprs. */
5032 r
= gen_rtx_REG (fmode
, cum
->fregno
);
5033 rvec
[k
++] = gen_rtx_EXPR_LIST (VOIDmode
, r
, const0_rtx
);
5035 return gen_rtx_PARALLEL (mode
, gen_rtvec_v (k
, rvec
));
5037 else if (align_words
< GP_ARG_NUM_REG
)
5039 if (TARGET_32BIT
&& TARGET_POWERPC64
)
5040 return rs6000_mixed_function_arg (mode
, type
, align_words
);
5042 if (mode
== BLKmode
)
5045 return gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
5052 /* For an arg passed partly in registers and partly in memory, this is
5053 the number of bytes passed in registers. For args passed entirely in
5054 registers or entirely in memory, zero. When an arg is described by a
5055 PARALLEL, perhaps using more than one register type, this function
5056 returns the number of bytes used by the first element of the PARALLEL. */
5059 rs6000_arg_partial_bytes (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
5060 tree type
, bool named
)
5065 if (DEFAULT_ABI
== ABI_V4
)
5068 if (USE_ALTIVEC_FOR_ARG_P (cum
, mode
, type
, named
)
5069 && cum
->nargs_prototype
>= 0)
5072 /* In this complicated case we just disable the partial_nregs code. */
5073 if (rs6000_darwin64_abi
&& mode
== BLKmode
5074 && TREE_CODE (type
) == RECORD_TYPE
5075 && int_size_in_bytes (type
) > 0)
5078 align_words
= rs6000_parm_start (mode
, type
, cum
->words
);
5080 if (USE_FP_FOR_ARG_P (cum
, mode
, type
)
5081 /* If we are passing this arg in the fixed parameter save area
5082 (gprs or memory) as well as fprs, then this function should
5083 return the number of bytes passed in the parameter save area
5084 rather than bytes passed in fprs. */
5086 && (cum
->nargs_prototype
<= 0
5087 || (DEFAULT_ABI
== ABI_AIX
5089 && align_words
>= GP_ARG_NUM_REG
))))
5091 if (cum
->fregno
+ ((GET_MODE_SIZE (mode
) + 7) >> 3) > FP_ARG_MAX_REG
+ 1)
5092 ret
= (FP_ARG_MAX_REG
+ 1 - cum
->fregno
) * 8;
5093 else if (cum
->nargs_prototype
>= 0)
5097 if (align_words
< GP_ARG_NUM_REG
5098 && GP_ARG_NUM_REG
< align_words
+ rs6000_arg_size (mode
, type
))
5099 ret
= (GP_ARG_NUM_REG
- align_words
) * (TARGET_32BIT
? 4 : 8);
5101 if (ret
!= 0 && TARGET_DEBUG_ARG
)
5102 fprintf (stderr
, "rs6000_arg_partial_bytes: %d\n", ret
);
5107 /* A C expression that indicates when an argument must be passed by
5108 reference. If nonzero for an argument, a copy of that argument is
5109 made in memory and a pointer to the argument is passed instead of
5110 the argument itself. The pointer is passed in whatever way is
5111 appropriate for passing a pointer to that type.
5113 Under V.4, aggregates and long double are passed by reference.
5115 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5116 reference unless the AltiVec vector extension ABI is in force.
5118 As an extension to all ABIs, variable sized types are passed by
5122 rs6000_pass_by_reference (CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
,
5123 enum machine_mode mode
, tree type
,
5124 bool named ATTRIBUTE_UNUSED
)
5126 if (DEFAULT_ABI
== ABI_V4
&& mode
== TFmode
)
5128 if (TARGET_DEBUG_ARG
)
5129 fprintf (stderr
, "function_arg_pass_by_reference: V4 long double\n");
5136 if (DEFAULT_ABI
== ABI_V4
&& AGGREGATE_TYPE_P (type
))
5138 if (TARGET_DEBUG_ARG
)
5139 fprintf (stderr
, "function_arg_pass_by_reference: V4 aggregate\n");
5143 if (int_size_in_bytes (type
) < 0)
5145 if (TARGET_DEBUG_ARG
)
5146 fprintf (stderr
, "function_arg_pass_by_reference: variable size\n");
5150 /* Allow -maltivec -mabi=no-altivec without warning. Altivec vector
5151 modes only exist for GCC vector types if -maltivec. */
5152 if (TARGET_32BIT
&& !TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
5154 if (TARGET_DEBUG_ARG
)
5155 fprintf (stderr
, "function_arg_pass_by_reference: AltiVec\n");
5159 /* Pass synthetic vectors in memory. */
5160 if (TREE_CODE (type
) == VECTOR_TYPE
5161 && int_size_in_bytes (type
) > (TARGET_ALTIVEC_ABI
? 16 : 8))
5163 static bool warned_for_pass_big_vectors
= false;
5164 if (TARGET_DEBUG_ARG
)
5165 fprintf (stderr
, "function_arg_pass_by_reference: synthetic vector\n");
5166 if (!warned_for_pass_big_vectors
)
5168 warning (0, "GCC vector passed by reference: "
5169 "non-standard ABI extension with no compatibility guarantee");
5170 warned_for_pass_big_vectors
= true;
5179 rs6000_move_block_from_reg (int regno
, rtx x
, int nregs
)
5182 enum machine_mode reg_mode
= TARGET_32BIT
? SImode
: DImode
;
5187 for (i
= 0; i
< nregs
; i
++)
5189 rtx tem
= adjust_address_nv (x
, reg_mode
, i
* GET_MODE_SIZE (reg_mode
));
5190 if (reload_completed
)
5192 if (! strict_memory_address_p (reg_mode
, XEXP (tem
, 0)))
5195 tem
= simplify_gen_subreg (reg_mode
, x
, BLKmode
,
5196 i
* GET_MODE_SIZE (reg_mode
));
5199 tem
= replace_equiv_address (tem
, XEXP (tem
, 0));
5203 emit_move_insn (tem
, gen_rtx_REG (reg_mode
, regno
+ i
));
5207 /* Perform any needed actions needed for a function that is receiving a
5208 variable number of arguments.
5212 MODE and TYPE are the mode and type of the current parameter.
5214 PRETEND_SIZE is a variable that should be set to the amount of stack
5215 that must be pushed by the prolog to pretend that our caller pushed
5218 Normally, this macro will push all remaining incoming registers on the
5219 stack and set PRETEND_SIZE to the length of the registers pushed. */
5222 setup_incoming_varargs (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
5223 tree type
, int *pretend_size ATTRIBUTE_UNUSED
,
5226 CUMULATIVE_ARGS next_cum
;
5227 int reg_size
= TARGET_32BIT
? 4 : 8;
5228 rtx save_area
= NULL_RTX
, mem
;
5229 int first_reg_offset
, set
;
5231 /* Skip the last named argument. */
5233 function_arg_advance (&next_cum
, mode
, type
, 1, 0);
5235 if (DEFAULT_ABI
== ABI_V4
)
5237 first_reg_offset
= next_cum
.sysv_gregno
- GP_ARG_MIN_REG
;
5241 int gpr_reg_num
= 0, gpr_size
= 0, fpr_size
= 0;
5242 HOST_WIDE_INT offset
= 0;
5244 /* Try to optimize the size of the varargs save area.
5245 The ABI requires that ap.reg_save_area is doubleword
5246 aligned, but we don't need to allocate space for all
5247 the bytes, only those to which we actually will save
5249 if (cfun
->va_list_gpr_size
&& first_reg_offset
< GP_ARG_NUM_REG
)
5250 gpr_reg_num
= GP_ARG_NUM_REG
- first_reg_offset
;
5251 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
5252 && next_cum
.fregno
<= FP_ARG_V4_MAX_REG
5253 && cfun
->va_list_fpr_size
)
5256 fpr_size
= (next_cum
.fregno
- FP_ARG_MIN_REG
)
5257 * UNITS_PER_FP_WORD
;
5258 if (cfun
->va_list_fpr_size
5259 < FP_ARG_V4_MAX_REG
+ 1 - next_cum
.fregno
)
5260 fpr_size
+= cfun
->va_list_fpr_size
* UNITS_PER_FP_WORD
;
5262 fpr_size
+= (FP_ARG_V4_MAX_REG
+ 1 - next_cum
.fregno
)
5263 * UNITS_PER_FP_WORD
;
5267 offset
= -((first_reg_offset
* reg_size
) & ~7);
5268 if (!fpr_size
&& gpr_reg_num
> cfun
->va_list_gpr_size
)
5270 gpr_reg_num
= cfun
->va_list_gpr_size
;
5271 if (reg_size
== 4 && (first_reg_offset
& 1))
5274 gpr_size
= (gpr_reg_num
* reg_size
+ 7) & ~7;
5277 offset
= - (int) (next_cum
.fregno
- FP_ARG_MIN_REG
)
5279 - (int) (GP_ARG_NUM_REG
* reg_size
);
5281 if (gpr_size
+ fpr_size
)
5284 = assign_stack_local (BLKmode
, gpr_size
+ fpr_size
, 64);
5285 gcc_assert (GET_CODE (reg_save_area
) == MEM
);
5286 reg_save_area
= XEXP (reg_save_area
, 0);
5287 if (GET_CODE (reg_save_area
) == PLUS
)
5289 gcc_assert (XEXP (reg_save_area
, 0)
5290 == virtual_stack_vars_rtx
);
5291 gcc_assert (GET_CODE (XEXP (reg_save_area
, 1)) == CONST_INT
);
5292 offset
+= INTVAL (XEXP (reg_save_area
, 1));
5295 gcc_assert (reg_save_area
== virtual_stack_vars_rtx
);
5298 cfun
->machine
->varargs_save_offset
= offset
;
5299 save_area
= plus_constant (virtual_stack_vars_rtx
, offset
);
5304 first_reg_offset
= next_cum
.words
;
5305 save_area
= virtual_incoming_args_rtx
;
5307 if (targetm
.calls
.must_pass_in_stack (mode
, type
))
5308 first_reg_offset
+= rs6000_arg_size (TYPE_MODE (type
), type
);
5311 set
= get_varargs_alias_set ();
5312 if (! no_rtl
&& first_reg_offset
< GP_ARG_NUM_REG
5313 && cfun
->va_list_gpr_size
)
5315 int nregs
= GP_ARG_NUM_REG
- first_reg_offset
;
5317 if (va_list_gpr_counter_field
)
5319 /* V4 va_list_gpr_size counts number of registers needed. */
5320 if (nregs
> cfun
->va_list_gpr_size
)
5321 nregs
= cfun
->va_list_gpr_size
;
5325 /* char * va_list instead counts number of bytes needed. */
5326 if (nregs
> cfun
->va_list_gpr_size
/ reg_size
)
5327 nregs
= cfun
->va_list_gpr_size
/ reg_size
;
5330 mem
= gen_rtx_MEM (BLKmode
,
5331 plus_constant (save_area
,
5332 first_reg_offset
* reg_size
)),
5333 set_mem_alias_set (mem
, set
);
5334 set_mem_align (mem
, BITS_PER_WORD
);
5336 rs6000_move_block_from_reg (GP_ARG_MIN_REG
+ first_reg_offset
, mem
,
5340 /* Save FP registers if needed. */
5341 if (DEFAULT_ABI
== ABI_V4
5342 && TARGET_HARD_FLOAT
&& TARGET_FPRS
5344 && next_cum
.fregno
<= FP_ARG_V4_MAX_REG
5345 && cfun
->va_list_fpr_size
)
5347 int fregno
= next_cum
.fregno
, nregs
;
5348 rtx cr1
= gen_rtx_REG (CCmode
, CR1_REGNO
);
5349 rtx lab
= gen_label_rtx ();
5350 int off
= (GP_ARG_NUM_REG
* reg_size
) + ((fregno
- FP_ARG_MIN_REG
)
5351 * UNITS_PER_FP_WORD
);
5354 (gen_rtx_SET (VOIDmode
,
5356 gen_rtx_IF_THEN_ELSE (VOIDmode
,
5357 gen_rtx_NE (VOIDmode
, cr1
,
5359 gen_rtx_LABEL_REF (VOIDmode
, lab
),
5363 fregno
<= FP_ARG_V4_MAX_REG
&& nregs
< cfun
->va_list_fpr_size
;
5364 fregno
++, off
+= UNITS_PER_FP_WORD
, nregs
++)
5366 mem
= gen_rtx_MEM (DFmode
, plus_constant (save_area
, off
));
5367 set_mem_alias_set (mem
, set
);
5368 set_mem_align (mem
, GET_MODE_ALIGNMENT (DFmode
));
5369 emit_move_insn (mem
, gen_rtx_REG (DFmode
, fregno
));
5376 /* Create the va_list data type. */
5379 rs6000_build_builtin_va_list (void)
5381 tree f_gpr
, f_fpr
, f_res
, f_ovf
, f_sav
, record
, type_decl
;
5383 /* For AIX, prefer 'char *' because that's what the system
5384 header files like. */
5385 if (DEFAULT_ABI
!= ABI_V4
)
5386 return build_pointer_type (char_type_node
);
5388 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
5389 type_decl
= build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
5391 f_gpr
= build_decl (FIELD_DECL
, get_identifier ("gpr"),
5392 unsigned_char_type_node
);
5393 f_fpr
= build_decl (FIELD_DECL
, get_identifier ("fpr"),
5394 unsigned_char_type_node
);
5395 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
5397 f_res
= build_decl (FIELD_DECL
, get_identifier ("reserved"),
5398 short_unsigned_type_node
);
5399 f_ovf
= build_decl (FIELD_DECL
, get_identifier ("overflow_arg_area"),
5401 f_sav
= build_decl (FIELD_DECL
, get_identifier ("reg_save_area"),
5404 va_list_gpr_counter_field
= f_gpr
;
5405 va_list_fpr_counter_field
= f_fpr
;
5407 DECL_FIELD_CONTEXT (f_gpr
) = record
;
5408 DECL_FIELD_CONTEXT (f_fpr
) = record
;
5409 DECL_FIELD_CONTEXT (f_res
) = record
;
5410 DECL_FIELD_CONTEXT (f_ovf
) = record
;
5411 DECL_FIELD_CONTEXT (f_sav
) = record
;
5413 TREE_CHAIN (record
) = type_decl
;
5414 TYPE_NAME (record
) = type_decl
;
5415 TYPE_FIELDS (record
) = f_gpr
;
5416 TREE_CHAIN (f_gpr
) = f_fpr
;
5417 TREE_CHAIN (f_fpr
) = f_res
;
5418 TREE_CHAIN (f_res
) = f_ovf
;
5419 TREE_CHAIN (f_ovf
) = f_sav
;
5421 layout_type (record
);
5423 /* The correct type is an array type of one element. */
5424 return build_array_type (record
, build_index_type (size_zero_node
));
5427 /* Implement va_start. */
5430 rs6000_va_start (tree valist
, rtx nextarg
)
5432 HOST_WIDE_INT words
, n_gpr
, n_fpr
;
5433 tree f_gpr
, f_fpr
, f_res
, f_ovf
, f_sav
;
5434 tree gpr
, fpr
, ovf
, sav
, t
;
5436 /* Only SVR4 needs something special. */
5437 if (DEFAULT_ABI
!= ABI_V4
)
5439 std_expand_builtin_va_start (valist
, nextarg
);
5443 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
5444 f_fpr
= TREE_CHAIN (f_gpr
);
5445 f_res
= TREE_CHAIN (f_fpr
);
5446 f_ovf
= TREE_CHAIN (f_res
);
5447 f_sav
= TREE_CHAIN (f_ovf
);
5449 valist
= build_va_arg_indirect_ref (valist
);
5450 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
, NULL_TREE
);
5451 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
, NULL_TREE
);
5452 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
, NULL_TREE
);
5453 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
, NULL_TREE
);
5455 /* Count number of gp and fp argument registers used. */
5456 words
= current_function_args_info
.words
;
5457 n_gpr
= MIN (current_function_args_info
.sysv_gregno
- GP_ARG_MIN_REG
,
5459 n_fpr
= MIN (current_function_args_info
.fregno
- FP_ARG_MIN_REG
,
5462 if (TARGET_DEBUG_ARG
)
5463 fprintf (stderr
, "va_start: words = "HOST_WIDE_INT_PRINT_DEC
", n_gpr = "
5464 HOST_WIDE_INT_PRINT_DEC
", n_fpr = "HOST_WIDE_INT_PRINT_DEC
"\n",
5465 words
, n_gpr
, n_fpr
);
5467 if (cfun
->va_list_gpr_size
)
5469 t
= build (MODIFY_EXPR
, TREE_TYPE (gpr
), gpr
,
5470 build_int_cst (NULL_TREE
, n_gpr
));
5471 TREE_SIDE_EFFECTS (t
) = 1;
5472 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5475 if (cfun
->va_list_fpr_size
)
5477 t
= build (MODIFY_EXPR
, TREE_TYPE (fpr
), fpr
,
5478 build_int_cst (NULL_TREE
, n_fpr
));
5479 TREE_SIDE_EFFECTS (t
) = 1;
5480 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5483 /* Find the overflow area. */
5484 t
= make_tree (TREE_TYPE (ovf
), virtual_incoming_args_rtx
);
5486 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), t
,
5487 build_int_cst (NULL_TREE
, words
* UNITS_PER_WORD
));
5488 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
5489 TREE_SIDE_EFFECTS (t
) = 1;
5490 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5492 /* If there were no va_arg invocations, don't set up the register
5494 if (!cfun
->va_list_gpr_size
5495 && !cfun
->va_list_fpr_size
5496 && n_gpr
< GP_ARG_NUM_REG
5497 && n_fpr
< FP_ARG_V4_MAX_REG
)
5500 /* Find the register save area. */
5501 t
= make_tree (TREE_TYPE (sav
), virtual_stack_vars_rtx
);
5502 if (cfun
->machine
->varargs_save_offset
)
5503 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
5504 build_int_cst (NULL_TREE
, cfun
->machine
->varargs_save_offset
));
5505 t
= build (MODIFY_EXPR
, TREE_TYPE (sav
), sav
, t
);
5506 TREE_SIDE_EFFECTS (t
) = 1;
5507 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5510 /* Implement va_arg. */
5513 rs6000_gimplify_va_arg (tree valist
, tree type
, tree
*pre_p
, tree
*post_p
)
5515 tree f_gpr
, f_fpr
, f_res
, f_ovf
, f_sav
;
5516 tree gpr
, fpr
, ovf
, sav
, reg
, t
, u
;
5517 int size
, rsize
, n_reg
, sav_ofs
, sav_scale
;
5518 tree lab_false
, lab_over
, addr
;
5520 tree ptrtype
= build_pointer_type (type
);
5522 if (pass_by_reference (NULL
, TYPE_MODE (type
), type
, false))
5524 t
= rs6000_gimplify_va_arg (valist
, ptrtype
, pre_p
, post_p
);
5525 return build_va_arg_indirect_ref (t
);
5528 if (DEFAULT_ABI
!= ABI_V4
)
5530 if (targetm
.calls
.split_complex_arg
&& TREE_CODE (type
) == COMPLEX_TYPE
)
5532 tree elem_type
= TREE_TYPE (type
);
5533 enum machine_mode elem_mode
= TYPE_MODE (elem_type
);
5534 int elem_size
= GET_MODE_SIZE (elem_mode
);
5536 if (elem_size
< UNITS_PER_WORD
)
5538 tree real_part
, imag_part
;
5539 tree post
= NULL_TREE
;
5541 real_part
= rs6000_gimplify_va_arg (valist
, elem_type
, pre_p
,
5543 /* Copy the value into a temporary, lest the formal temporary
5544 be reused out from under us. */
5545 real_part
= get_initialized_tmp_var (real_part
, pre_p
, &post
);
5546 append_to_statement_list (post
, pre_p
);
5548 imag_part
= rs6000_gimplify_va_arg (valist
, elem_type
, pre_p
,
5551 return build (COMPLEX_EXPR
, type
, real_part
, imag_part
);
5555 return std_gimplify_va_arg_expr (valist
, type
, pre_p
, post_p
);
5558 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
5559 f_fpr
= TREE_CHAIN (f_gpr
);
5560 f_res
= TREE_CHAIN (f_fpr
);
5561 f_ovf
= TREE_CHAIN (f_res
);
5562 f_sav
= TREE_CHAIN (f_ovf
);
5564 valist
= build_va_arg_indirect_ref (valist
);
5565 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
, NULL_TREE
);
5566 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
, NULL_TREE
);
5567 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
, NULL_TREE
);
5568 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
, NULL_TREE
);
5570 size
= int_size_in_bytes (type
);
5571 rsize
= (size
+ 3) / 4;
5574 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
5575 && (TYPE_MODE (type
) == SFmode
|| TYPE_MODE (type
) == DFmode
))
5577 /* FP args go in FP registers, if present. */
5582 if (TYPE_MODE (type
) == DFmode
)
5587 /* Otherwise into GP registers. */
5596 /* Pull the value out of the saved registers.... */
5599 addr
= create_tmp_var (ptr_type_node
, "addr");
5600 DECL_POINTER_ALIAS_SET (addr
) = get_varargs_alias_set ();
5602 /* AltiVec vectors never go in registers when -mabi=altivec. */
5603 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (TYPE_MODE (type
)))
5607 lab_false
= create_artificial_label ();
5608 lab_over
= create_artificial_label ();
5610 /* Long long and SPE vectors are aligned in the registers.
5611 As are any other 2 gpr item such as complex int due to a
5612 historical mistake. */
5616 u
= build2 (BIT_AND_EXPR
, TREE_TYPE (reg
), reg
,
5617 size_int (n_reg
- 1));
5618 u
= build2 (POSTINCREMENT_EXPR
, TREE_TYPE (reg
), reg
, u
);
5621 t
= fold_convert (TREE_TYPE (reg
), size_int (8 - n_reg
+ 1));
5622 t
= build2 (GE_EXPR
, boolean_type_node
, u
, t
);
5623 u
= build1 (GOTO_EXPR
, void_type_node
, lab_false
);
5624 t
= build3 (COND_EXPR
, void_type_node
, t
, u
, NULL_TREE
);
5625 gimplify_and_add (t
, pre_p
);
5629 t
= build2 (PLUS_EXPR
, ptr_type_node
, sav
, size_int (sav_ofs
));
5631 u
= build2 (POSTINCREMENT_EXPR
, TREE_TYPE (reg
), reg
, size_int (n_reg
));
5632 u
= build1 (CONVERT_EXPR
, integer_type_node
, u
);
5633 u
= build2 (MULT_EXPR
, integer_type_node
, u
, size_int (sav_scale
));
5634 t
= build2 (PLUS_EXPR
, ptr_type_node
, t
, u
);
5636 t
= build2 (MODIFY_EXPR
, void_type_node
, addr
, t
);
5637 gimplify_and_add (t
, pre_p
);
5639 t
= build1 (GOTO_EXPR
, void_type_node
, lab_over
);
5640 gimplify_and_add (t
, pre_p
);
5642 t
= build1 (LABEL_EXPR
, void_type_node
, lab_false
);
5643 append_to_statement_list (t
, pre_p
);
5647 /* Ensure that we don't find any more args in regs.
5648 Alignment has taken care of the n_reg == 2 case. */
5649 t
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, size_int (8));
5650 gimplify_and_add (t
, pre_p
);
5654 /* ... otherwise out of the overflow area. */
5656 /* Care for on-stack alignment if needed. */
5660 t
= build2 (PLUS_EXPR
, TREE_TYPE (t
), t
, size_int (align
- 1));
5661 t
= build2 (BIT_AND_EXPR
, TREE_TYPE (t
), t
,
5662 build_int_cst (NULL_TREE
, -align
));
5664 gimplify_expr (&t
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
5666 u
= build2 (MODIFY_EXPR
, void_type_node
, addr
, t
);
5667 gimplify_and_add (u
, pre_p
);
5669 t
= build2 (PLUS_EXPR
, TREE_TYPE (t
), t
, size_int (size
));
5670 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
5671 gimplify_and_add (t
, pre_p
);
5675 t
= build1 (LABEL_EXPR
, void_type_node
, lab_over
);
5676 append_to_statement_list (t
, pre_p
);
5679 addr
= fold_convert (ptrtype
, addr
);
5680 return build_va_arg_indirect_ref (addr
);
5686 def_builtin (int mask
, const char *name
, tree type
, int code
)
5688 if (mask
& target_flags
)
5690 if (rs6000_builtin_decls
[code
])
5693 rs6000_builtin_decls
[code
] =
5694 lang_hooks
.builtin_function (name
, type
, code
, BUILT_IN_MD
,
5699 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
5701 static const struct builtin_description bdesc_3arg
[] =
5703 { MASK_ALTIVEC
, CODE_FOR_altivec_vmaddfp
, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP
},
5704 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhaddshs
, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS
},
5705 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhraddshs
, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS
},
5706 { MASK_ALTIVEC
, CODE_FOR_altivec_vmladduhm
, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM
},
5707 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumubm
, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM
},
5708 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsummbm
, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM
},
5709 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhm
, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM
},
5710 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshm
, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM
},
5711 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhs
, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS
},
5712 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshs
, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS
},
5713 { MASK_ALTIVEC
, CODE_FOR_altivec_vnmsubfp
, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP
},
5714 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_v4sf
, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF
},
5715 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_v4si
, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI
},
5716 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_v8hi
, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI
},
5717 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_v16qi
, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI
},
5718 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_v4sf
, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF
},
5719 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_v4si
, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI
},
5720 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_v8hi
, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI
},
5721 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_v16qi
, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI
},
5722 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_v16qi
, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI
},
5723 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_v8hi
, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI
},
5724 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_v4si
, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI
},
5725 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_v4sf
, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF
},
5727 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_madd", ALTIVEC_BUILTIN_VEC_MADD
},
5728 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_madds", ALTIVEC_BUILTIN_VEC_MADDS
},
5729 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_mladd", ALTIVEC_BUILTIN_VEC_MLADD
},
5730 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_mradds", ALTIVEC_BUILTIN_VEC_MRADDS
},
5731 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_msum", ALTIVEC_BUILTIN_VEC_MSUM
},
5732 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmsumshm", ALTIVEC_BUILTIN_VEC_VMSUMSHM
},
5733 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmsumuhm", ALTIVEC_BUILTIN_VEC_VMSUMUHM
},
5734 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmsummbm", ALTIVEC_BUILTIN_VEC_VMSUMMBM
},
5735 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmsumubm", ALTIVEC_BUILTIN_VEC_VMSUMUBM
},
5736 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_msums", ALTIVEC_BUILTIN_VEC_MSUMS
},
5737 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmsumshs", ALTIVEC_BUILTIN_VEC_VMSUMSHS
},
5738 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS
},
5739 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB
},
5740 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM
},
5741 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL
},
5744 /* DST operations: void foo (void *, const int, const char). */
5746 static const struct builtin_description bdesc_dst
[] =
5748 { MASK_ALTIVEC
, CODE_FOR_altivec_dst
, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST
},
5749 { MASK_ALTIVEC
, CODE_FOR_altivec_dstt
, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT
},
5750 { MASK_ALTIVEC
, CODE_FOR_altivec_dstst
, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST
},
5751 { MASK_ALTIVEC
, CODE_FOR_altivec_dststt
, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT
},
5753 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_dst", ALTIVEC_BUILTIN_VEC_DST
},
5754 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_dstt", ALTIVEC_BUILTIN_VEC_DSTT
},
5755 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_dstst", ALTIVEC_BUILTIN_VEC_DSTST
},
5756 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_dststt", ALTIVEC_BUILTIN_VEC_DSTSTT
}
5759 /* Simple binary operations: VECc = foo (VECa, VECb). */
5761 static struct builtin_description bdesc_2arg
[] =
5763 { MASK_ALTIVEC
, CODE_FOR_addv16qi3
, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM
},
5764 { MASK_ALTIVEC
, CODE_FOR_addv8hi3
, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM
},
5765 { MASK_ALTIVEC
, CODE_FOR_addv4si3
, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM
},
5766 { MASK_ALTIVEC
, CODE_FOR_addv4sf3
, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP
},
5767 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddcuw
, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW
},
5768 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddubs
, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS
},
5769 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsbs
, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS
},
5770 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduhs
, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS
},
5771 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddshs
, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS
},
5772 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduws
, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS
},
5773 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsws
, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS
},
5774 { MASK_ALTIVEC
, CODE_FOR_andv4si3
, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND
},
5775 { MASK_ALTIVEC
, CODE_FOR_andcv4si3
, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC
},
5776 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgub
, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB
},
5777 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsb
, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB
},
5778 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguh
, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH
},
5779 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsh
, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH
},
5780 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguw
, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW
},
5781 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsw
, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW
},
5782 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfux
, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX
},
5783 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfsx
, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX
},
5784 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpbfp
, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP
},
5785 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequb
, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB
},
5786 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequh
, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH
},
5787 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequw
, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW
},
5788 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpeqfp
, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP
},
5789 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgefp
, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP
},
5790 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtub
, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB
},
5791 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsb
, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB
},
5792 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuh
, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH
},
5793 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsh
, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH
},
5794 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuw
, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW
},
5795 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsw
, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW
},
5796 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtfp
, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP
},
5797 { MASK_ALTIVEC
, CODE_FOR_altivec_vctsxs
, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS
},
5798 { MASK_ALTIVEC
, CODE_FOR_altivec_vctuxs
, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS
},
5799 { MASK_ALTIVEC
, CODE_FOR_umaxv16qi3
, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB
},
5800 { MASK_ALTIVEC
, CODE_FOR_smaxv16qi3
, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB
},
5801 { MASK_ALTIVEC
, CODE_FOR_umaxv8hi3
, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH
},
5802 { MASK_ALTIVEC
, CODE_FOR_smaxv8hi3
, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH
},
5803 { MASK_ALTIVEC
, CODE_FOR_umaxv4si3
, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW
},
5804 { MASK_ALTIVEC
, CODE_FOR_smaxv4si3
, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW
},
5805 { MASK_ALTIVEC
, CODE_FOR_smaxv4sf3
, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP
},
5806 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghb
, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB
},
5807 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghh
, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH
},
5808 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghw
, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW
},
5809 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglb
, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB
},
5810 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglh
, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH
},
5811 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglw
, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW
},
5812 { MASK_ALTIVEC
, CODE_FOR_uminv16qi3
, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB
},
5813 { MASK_ALTIVEC
, CODE_FOR_sminv16qi3
, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB
},
5814 { MASK_ALTIVEC
, CODE_FOR_uminv8hi3
, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH
},
5815 { MASK_ALTIVEC
, CODE_FOR_sminv8hi3
, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH
},
5816 { MASK_ALTIVEC
, CODE_FOR_uminv4si3
, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW
},
5817 { MASK_ALTIVEC
, CODE_FOR_sminv4si3
, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW
},
5818 { MASK_ALTIVEC
, CODE_FOR_sminv4sf3
, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP
},
5819 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleub
, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB
},
5820 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesb
, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB
},
5821 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleuh
, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH
},
5822 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesh
, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH
},
5823 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuloub
, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB
},
5824 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosb
, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB
},
5825 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulouh
, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH
},
5826 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosh
, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH
},
5827 { MASK_ALTIVEC
, CODE_FOR_altivec_norv4si3
, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR
},
5828 { MASK_ALTIVEC
, CODE_FOR_iorv4si3
, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR
},
5829 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhum
, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM
},
5830 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwum
, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM
},
5831 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkpx
, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX
},
5832 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhss
, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS
},
5833 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshss
, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS
},
5834 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwss
, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS
},
5835 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswss
, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS
},
5836 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhus
, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS
},
5837 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshus
, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS
},
5838 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwus
, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS
},
5839 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswus
, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS
},
5840 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlb
, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB
},
5841 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlh
, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH
},
5842 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlw
, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW
},
5843 { MASK_ALTIVEC
, CODE_FOR_altivec_vslb
, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB
},
5844 { MASK_ALTIVEC
, CODE_FOR_altivec_vslh
, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH
},
5845 { MASK_ALTIVEC
, CODE_FOR_altivec_vslw
, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW
},
5846 { MASK_ALTIVEC
, CODE_FOR_altivec_vsl
, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL
},
5847 { MASK_ALTIVEC
, CODE_FOR_altivec_vslo
, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO
},
5848 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltb
, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB
},
5849 { MASK_ALTIVEC
, CODE_FOR_altivec_vsplth
, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH
},
5850 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltw
, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW
},
5851 { MASK_ALTIVEC
, CODE_FOR_lshrv16qi3
, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB
},
5852 { MASK_ALTIVEC
, CODE_FOR_lshrv8hi3
, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH
},
5853 { MASK_ALTIVEC
, CODE_FOR_lshrv4si3
, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW
},
5854 { MASK_ALTIVEC
, CODE_FOR_ashrv16qi3
, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB
},
5855 { MASK_ALTIVEC
, CODE_FOR_ashrv8hi3
, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH
},
5856 { MASK_ALTIVEC
, CODE_FOR_ashrv4si3
, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW
},
5857 { MASK_ALTIVEC
, CODE_FOR_altivec_vsr
, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR
},
5858 { MASK_ALTIVEC
, CODE_FOR_altivec_vsro
, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO
},
5859 { MASK_ALTIVEC
, CODE_FOR_subv16qi3
, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM
},
5860 { MASK_ALTIVEC
, CODE_FOR_subv8hi3
, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM
},
5861 { MASK_ALTIVEC
, CODE_FOR_subv4si3
, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM
},
5862 { MASK_ALTIVEC
, CODE_FOR_subv4sf3
, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP
},
5863 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubcuw
, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW
},
5864 { MASK_ALTIVEC
, CODE_FOR_altivec_vsububs
, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS
},
5865 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsbs
, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS
},
5866 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuhs
, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS
},
5867 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubshs
, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS
},
5868 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuws
, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS
},
5869 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsws
, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS
},
5870 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4ubs
, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS
},
5871 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4sbs
, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS
},
5872 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4shs
, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS
},
5873 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum2sws
, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS
},
5874 { MASK_ALTIVEC
, CODE_FOR_altivec_vsumsws
, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS
},
5875 { MASK_ALTIVEC
, CODE_FOR_xorv4si3
, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR
},
5877 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_add", ALTIVEC_BUILTIN_VEC_ADD
},
5878 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vaddfp", ALTIVEC_BUILTIN_VEC_VADDFP
},
5879 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vadduwm", ALTIVEC_BUILTIN_VEC_VADDUWM
},
5880 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vadduhm", ALTIVEC_BUILTIN_VEC_VADDUHM
},
5881 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vaddubm", ALTIVEC_BUILTIN_VEC_VADDUBM
},
5882 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_addc", ALTIVEC_BUILTIN_VEC_ADDC
},
5883 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_adds", ALTIVEC_BUILTIN_VEC_ADDS
},
5884 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vaddsws", ALTIVEC_BUILTIN_VEC_VADDSWS
},
5885 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vadduws", ALTIVEC_BUILTIN_VEC_VADDUWS
},
5886 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vaddshs", ALTIVEC_BUILTIN_VEC_VADDSHS
},
5887 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vadduhs", ALTIVEC_BUILTIN_VEC_VADDUHS
},
5888 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vaddsbs", ALTIVEC_BUILTIN_VEC_VADDSBS
},
5889 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vaddubs", ALTIVEC_BUILTIN_VEC_VADDUBS
},
5890 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_and", ALTIVEC_BUILTIN_VEC_AND
},
5891 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_andc", ALTIVEC_BUILTIN_VEC_ANDC
},
5892 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_avg", ALTIVEC_BUILTIN_VEC_AVG
},
5893 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vavgsw", ALTIVEC_BUILTIN_VEC_VAVGSW
},
5894 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vavguw", ALTIVEC_BUILTIN_VEC_VAVGUW
},
5895 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vavgsh", ALTIVEC_BUILTIN_VEC_VAVGSH
},
5896 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vavguh", ALTIVEC_BUILTIN_VEC_VAVGUH
},
5897 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vavgsb", ALTIVEC_BUILTIN_VEC_VAVGSB
},
5898 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vavgub", ALTIVEC_BUILTIN_VEC_VAVGUB
},
5899 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_cmpb", ALTIVEC_BUILTIN_VEC_CMPB
},
5900 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_cmpeq", ALTIVEC_BUILTIN_VEC_CMPEQ
},
5901 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vcmpeqfp", ALTIVEC_BUILTIN_VEC_VCMPEQFP
},
5902 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vcmpequw", ALTIVEC_BUILTIN_VEC_VCMPEQUW
},
5903 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vcmpequh", ALTIVEC_BUILTIN_VEC_VCMPEQUH
},
5904 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vcmpequb", ALTIVEC_BUILTIN_VEC_VCMPEQUB
},
5905 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_cmpge", ALTIVEC_BUILTIN_VEC_CMPGE
},
5906 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_cmpgt", ALTIVEC_BUILTIN_VEC_CMPGT
},
5907 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vcmpgtfp", ALTIVEC_BUILTIN_VEC_VCMPGTFP
},
5908 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vcmpgtsw", ALTIVEC_BUILTIN_VEC_VCMPGTSW
},
5909 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vcmpgtuw", ALTIVEC_BUILTIN_VEC_VCMPGTUW
},
5910 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vcmpgtsh", ALTIVEC_BUILTIN_VEC_VCMPGTSH
},
5911 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vcmpgtuh", ALTIVEC_BUILTIN_VEC_VCMPGTUH
},
5912 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vcmpgtsb", ALTIVEC_BUILTIN_VEC_VCMPGTSB
},
5913 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vcmpgtub", ALTIVEC_BUILTIN_VEC_VCMPGTUB
},
5914 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_cmple", ALTIVEC_BUILTIN_VEC_CMPLE
},
5915 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_cmplt", ALTIVEC_BUILTIN_VEC_CMPLT
},
5916 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_max", ALTIVEC_BUILTIN_VEC_MAX
},
5917 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmaxfp", ALTIVEC_BUILTIN_VEC_VMAXFP
},
5918 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmaxsw", ALTIVEC_BUILTIN_VEC_VMAXSW
},
5919 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmaxuw", ALTIVEC_BUILTIN_VEC_VMAXUW
},
5920 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmaxsh", ALTIVEC_BUILTIN_VEC_VMAXSH
},
5921 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmaxuh", ALTIVEC_BUILTIN_VEC_VMAXUH
},
5922 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmaxsb", ALTIVEC_BUILTIN_VEC_VMAXSB
},
5923 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmaxub", ALTIVEC_BUILTIN_VEC_VMAXUB
},
5924 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_mergeh", ALTIVEC_BUILTIN_VEC_MERGEH
},
5925 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmrghw", ALTIVEC_BUILTIN_VEC_VMRGHW
},
5926 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmrghh", ALTIVEC_BUILTIN_VEC_VMRGHH
},
5927 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmrghb", ALTIVEC_BUILTIN_VEC_VMRGHB
},
5928 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_mergel", ALTIVEC_BUILTIN_VEC_MERGEL
},
5929 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmrglw", ALTIVEC_BUILTIN_VEC_VMRGLW
},
5930 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmrglh", ALTIVEC_BUILTIN_VEC_VMRGLH
},
5931 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmrglb", ALTIVEC_BUILTIN_VEC_VMRGLB
},
5932 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_min", ALTIVEC_BUILTIN_VEC_MIN
},
5933 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vminfp", ALTIVEC_BUILTIN_VEC_VMINFP
},
5934 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vminsw", ALTIVEC_BUILTIN_VEC_VMINSW
},
5935 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vminuw", ALTIVEC_BUILTIN_VEC_VMINUW
},
5936 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vminsh", ALTIVEC_BUILTIN_VEC_VMINSH
},
5937 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vminuh", ALTIVEC_BUILTIN_VEC_VMINUH
},
5938 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vminsb", ALTIVEC_BUILTIN_VEC_VMINSB
},
5939 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vminub", ALTIVEC_BUILTIN_VEC_VMINUB
},
5940 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_mule", ALTIVEC_BUILTIN_VEC_MULE
},
5941 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmuleub", ALTIVEC_BUILTIN_VEC_VMULEUB
},
5942 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmulesb", ALTIVEC_BUILTIN_VEC_VMULESB
},
5943 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmuleuh", ALTIVEC_BUILTIN_VEC_VMULEUH
},
5944 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmulesh", ALTIVEC_BUILTIN_VEC_VMULESH
},
5945 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_mulo", ALTIVEC_BUILTIN_VEC_MULO
},
5946 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmulosh", ALTIVEC_BUILTIN_VEC_VMULOSH
},
5947 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmulouh", ALTIVEC_BUILTIN_VEC_VMULOUH
},
5948 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmulosb", ALTIVEC_BUILTIN_VEC_VMULOSB
},
5949 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vmuloub", ALTIVEC_BUILTIN_VEC_VMULOUB
},
5950 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_nor", ALTIVEC_BUILTIN_VEC_NOR
},
5951 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_or", ALTIVEC_BUILTIN_VEC_OR
},
5952 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_pack", ALTIVEC_BUILTIN_VEC_PACK
},
5953 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vpkuwum", ALTIVEC_BUILTIN_VEC_VPKUWUM
},
5954 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vpkuhum", ALTIVEC_BUILTIN_VEC_VPKUHUM
},
5955 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_packpx", ALTIVEC_BUILTIN_VEC_PACKPX
},
5956 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_packs", ALTIVEC_BUILTIN_VEC_PACKS
},
5957 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vpkswss", ALTIVEC_BUILTIN_VEC_VPKSWSS
},
5958 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vpkuwus", ALTIVEC_BUILTIN_VEC_VPKUWUS
},
5959 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS
},
5960 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS
},
5961 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU
},
5962 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS
},
5963 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS
},
5964 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL
},
5965 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW
},
5966 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH
},
5967 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB
},
5968 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL
},
5969 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vslw", ALTIVEC_BUILTIN_VEC_VSLW
},
5970 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vslh", ALTIVEC_BUILTIN_VEC_VSLH
},
5971 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vslb", ALTIVEC_BUILTIN_VEC_VSLB
},
5972 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_sll", ALTIVEC_BUILTIN_VEC_SLL
},
5973 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_slo", ALTIVEC_BUILTIN_VEC_SLO
},
5974 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_sr", ALTIVEC_BUILTIN_VEC_SR
},
5975 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vsrw", ALTIVEC_BUILTIN_VEC_VSRW
},
5976 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vsrh", ALTIVEC_BUILTIN_VEC_VSRH
},
5977 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vsrb", ALTIVEC_BUILTIN_VEC_VSRB
},
5978 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_sra", ALTIVEC_BUILTIN_VEC_SRA
},
5979 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vsraw", ALTIVEC_BUILTIN_VEC_VSRAW
},
5980 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vsrah", ALTIVEC_BUILTIN_VEC_VSRAH
},
5981 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vsrab", ALTIVEC_BUILTIN_VEC_VSRAB
},
5982 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_srl", ALTIVEC_BUILTIN_VEC_SRL
},
5983 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_sro", ALTIVEC_BUILTIN_VEC_SRO
},
5984 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_sub", ALTIVEC_BUILTIN_VEC_SUB
},
5985 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vsubfp", ALTIVEC_BUILTIN_VEC_VSUBFP
},
5986 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vsubuwm", ALTIVEC_BUILTIN_VEC_VSUBUWM
},
5987 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vsubuhm", ALTIVEC_BUILTIN_VEC_VSUBUHM
},
5988 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vsububm", ALTIVEC_BUILTIN_VEC_VSUBUBM
},
5989 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_subc", ALTIVEC_BUILTIN_VEC_SUBC
},
5990 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_subs", ALTIVEC_BUILTIN_VEC_SUBS
},
5991 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vsubsws", ALTIVEC_BUILTIN_VEC_VSUBSWS
},
5992 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vsubuws", ALTIVEC_BUILTIN_VEC_VSUBUWS
},
5993 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vsubshs", ALTIVEC_BUILTIN_VEC_VSUBSHS
},
5994 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vsubuhs", ALTIVEC_BUILTIN_VEC_VSUBUHS
},
5995 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vsubsbs", ALTIVEC_BUILTIN_VEC_VSUBSBS
},
5996 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vsububs", ALTIVEC_BUILTIN_VEC_VSUBUBS
},
5997 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_sum4s", ALTIVEC_BUILTIN_VEC_SUM4S
},
5998 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vsum4shs", ALTIVEC_BUILTIN_VEC_VSUM4SHS
},
5999 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vsum4sbs", ALTIVEC_BUILTIN_VEC_VSUM4SBS
},
6000 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vsum4ubs", ALTIVEC_BUILTIN_VEC_VSUM4UBS
},
6001 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_sum2s", ALTIVEC_BUILTIN_VEC_SUM2S
},
6002 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_sums", ALTIVEC_BUILTIN_VEC_SUMS
},
6003 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_xor", ALTIVEC_BUILTIN_VEC_XOR
},
6005 /* Place holder, leave as first spe builtin. */
6006 { 0, CODE_FOR_spe_evaddw
, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW
},
6007 { 0, CODE_FOR_spe_evand
, "__builtin_spe_evand", SPE_BUILTIN_EVAND
},
6008 { 0, CODE_FOR_spe_evandc
, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC
},
6009 { 0, CODE_FOR_spe_evdivws
, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS
},
6010 { 0, CODE_FOR_spe_evdivwu
, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU
},
6011 { 0, CODE_FOR_spe_eveqv
, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV
},
6012 { 0, CODE_FOR_spe_evfsadd
, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD
},
6013 { 0, CODE_FOR_spe_evfsdiv
, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV
},
6014 { 0, CODE_FOR_spe_evfsmul
, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL
},
6015 { 0, CODE_FOR_spe_evfssub
, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB
},
6016 { 0, CODE_FOR_spe_evmergehi
, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI
},
6017 { 0, CODE_FOR_spe_evmergehilo
, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO
},
6018 { 0, CODE_FOR_spe_evmergelo
, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO
},
6019 { 0, CODE_FOR_spe_evmergelohi
, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI
},
6020 { 0, CODE_FOR_spe_evmhegsmfaa
, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA
},
6021 { 0, CODE_FOR_spe_evmhegsmfan
, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN
},
6022 { 0, CODE_FOR_spe_evmhegsmiaa
, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA
},
6023 { 0, CODE_FOR_spe_evmhegsmian
, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN
},
6024 { 0, CODE_FOR_spe_evmhegumiaa
, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA
},
6025 { 0, CODE_FOR_spe_evmhegumian
, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN
},
6026 { 0, CODE_FOR_spe_evmhesmf
, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF
},
6027 { 0, CODE_FOR_spe_evmhesmfa
, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA
},
6028 { 0, CODE_FOR_spe_evmhesmfaaw
, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW
},
6029 { 0, CODE_FOR_spe_evmhesmfanw
, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW
},
6030 { 0, CODE_FOR_spe_evmhesmi
, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI
},
6031 { 0, CODE_FOR_spe_evmhesmia
, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA
},
6032 { 0, CODE_FOR_spe_evmhesmiaaw
, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW
},
6033 { 0, CODE_FOR_spe_evmhesmianw
, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW
},
6034 { 0, CODE_FOR_spe_evmhessf
, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF
},
6035 { 0, CODE_FOR_spe_evmhessfa
, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA
},
6036 { 0, CODE_FOR_spe_evmhessfaaw
, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW
},
6037 { 0, CODE_FOR_spe_evmhessfanw
, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW
},
6038 { 0, CODE_FOR_spe_evmhessiaaw
, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW
},
6039 { 0, CODE_FOR_spe_evmhessianw
, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW
},
6040 { 0, CODE_FOR_spe_evmheumi
, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI
},
6041 { 0, CODE_FOR_spe_evmheumia
, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA
},
6042 { 0, CODE_FOR_spe_evmheumiaaw
, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW
},
6043 { 0, CODE_FOR_spe_evmheumianw
, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW
},
6044 { 0, CODE_FOR_spe_evmheusiaaw
, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW
},
6045 { 0, CODE_FOR_spe_evmheusianw
, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW
},
6046 { 0, CODE_FOR_spe_evmhogsmfaa
, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA
},
6047 { 0, CODE_FOR_spe_evmhogsmfan
, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN
},
6048 { 0, CODE_FOR_spe_evmhogsmiaa
, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA
},
6049 { 0, CODE_FOR_spe_evmhogsmian
, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN
},
6050 { 0, CODE_FOR_spe_evmhogumiaa
, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA
},
6051 { 0, CODE_FOR_spe_evmhogumian
, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN
},
6052 { 0, CODE_FOR_spe_evmhosmf
, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF
},
6053 { 0, CODE_FOR_spe_evmhosmfa
, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA
},
6054 { 0, CODE_FOR_spe_evmhosmfaaw
, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW
},
6055 { 0, CODE_FOR_spe_evmhosmfanw
, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW
},
6056 { 0, CODE_FOR_spe_evmhosmi
, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI
},
6057 { 0, CODE_FOR_spe_evmhosmia
, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA
},
6058 { 0, CODE_FOR_spe_evmhosmiaaw
, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW
},
6059 { 0, CODE_FOR_spe_evmhosmianw
, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW
},
6060 { 0, CODE_FOR_spe_evmhossf
, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF
},
6061 { 0, CODE_FOR_spe_evmhossfa
, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA
},
6062 { 0, CODE_FOR_spe_evmhossfaaw
, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW
},
6063 { 0, CODE_FOR_spe_evmhossfanw
, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW
},
6064 { 0, CODE_FOR_spe_evmhossiaaw
, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW
},
6065 { 0, CODE_FOR_spe_evmhossianw
, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW
},
6066 { 0, CODE_FOR_spe_evmhoumi
, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI
},
6067 { 0, CODE_FOR_spe_evmhoumia
, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA
},
6068 { 0, CODE_FOR_spe_evmhoumiaaw
, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW
},
6069 { 0, CODE_FOR_spe_evmhoumianw
, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW
},
6070 { 0, CODE_FOR_spe_evmhousiaaw
, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW
},
6071 { 0, CODE_FOR_spe_evmhousianw
, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW
},
6072 { 0, CODE_FOR_spe_evmwhsmf
, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF
},
6073 { 0, CODE_FOR_spe_evmwhsmfa
, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA
},
6074 { 0, CODE_FOR_spe_evmwhsmi
, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI
},
6075 { 0, CODE_FOR_spe_evmwhsmia
, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA
},
6076 { 0, CODE_FOR_spe_evmwhssf
, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF
},
6077 { 0, CODE_FOR_spe_evmwhssfa
, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA
},
6078 { 0, CODE_FOR_spe_evmwhumi
, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI
},
6079 { 0, CODE_FOR_spe_evmwhumia
, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA
},
6080 { 0, CODE_FOR_spe_evmwlsmiaaw
, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW
},
6081 { 0, CODE_FOR_spe_evmwlsmianw
, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW
},
6082 { 0, CODE_FOR_spe_evmwlssiaaw
, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW
},
6083 { 0, CODE_FOR_spe_evmwlssianw
, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW
},
6084 { 0, CODE_FOR_spe_evmwlumi
, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI
},
6085 { 0, CODE_FOR_spe_evmwlumia
, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA
},
6086 { 0, CODE_FOR_spe_evmwlumiaaw
, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW
},
6087 { 0, CODE_FOR_spe_evmwlumianw
, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW
},
6088 { 0, CODE_FOR_spe_evmwlusiaaw
, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW
},
6089 { 0, CODE_FOR_spe_evmwlusianw
, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW
},
6090 { 0, CODE_FOR_spe_evmwsmf
, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF
},
6091 { 0, CODE_FOR_spe_evmwsmfa
, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA
},
6092 { 0, CODE_FOR_spe_evmwsmfaa
, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA
},
6093 { 0, CODE_FOR_spe_evmwsmfan
, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN
},
6094 { 0, CODE_FOR_spe_evmwsmi
, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI
},
6095 { 0, CODE_FOR_spe_evmwsmia
, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA
},
6096 { 0, CODE_FOR_spe_evmwsmiaa
, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA
},
6097 { 0, CODE_FOR_spe_evmwsmian
, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN
},
6098 { 0, CODE_FOR_spe_evmwssf
, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF
},
6099 { 0, CODE_FOR_spe_evmwssfa
, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA
},
6100 { 0, CODE_FOR_spe_evmwssfaa
, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA
},
6101 { 0, CODE_FOR_spe_evmwssfan
, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN
},
6102 { 0, CODE_FOR_spe_evmwumi
, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI
},
6103 { 0, CODE_FOR_spe_evmwumia
, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA
},
6104 { 0, CODE_FOR_spe_evmwumiaa
, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA
},
6105 { 0, CODE_FOR_spe_evmwumian
, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN
},
6106 { 0, CODE_FOR_spe_evnand
, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND
},
6107 { 0, CODE_FOR_spe_evnor
, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR
},
6108 { 0, CODE_FOR_spe_evor
, "__builtin_spe_evor", SPE_BUILTIN_EVOR
},
6109 { 0, CODE_FOR_spe_evorc
, "__builtin_spe_evorc", SPE_BUILTIN_EVORC
},
6110 { 0, CODE_FOR_spe_evrlw
, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW
},
6111 { 0, CODE_FOR_spe_evslw
, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW
},
6112 { 0, CODE_FOR_spe_evsrws
, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS
},
6113 { 0, CODE_FOR_spe_evsrwu
, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU
},
6114 { 0, CODE_FOR_spe_evsubfw
, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW
},
6116 /* SPE binary operations expecting a 5-bit unsigned literal. */
6117 { 0, CODE_FOR_spe_evaddiw
, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW
},
6119 { 0, CODE_FOR_spe_evrlwi
, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI
},
6120 { 0, CODE_FOR_spe_evslwi
, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI
},
6121 { 0, CODE_FOR_spe_evsrwis
, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS
},
6122 { 0, CODE_FOR_spe_evsrwiu
, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU
},
6123 { 0, CODE_FOR_spe_evsubifw
, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW
},
6124 { 0, CODE_FOR_spe_evmwhssfaa
, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA
},
6125 { 0, CODE_FOR_spe_evmwhssmaa
, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA
},
6126 { 0, CODE_FOR_spe_evmwhsmfaa
, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA
},
6127 { 0, CODE_FOR_spe_evmwhsmiaa
, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA
},
6128 { 0, CODE_FOR_spe_evmwhusiaa
, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA
},
6129 { 0, CODE_FOR_spe_evmwhumiaa
, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA
},
6130 { 0, CODE_FOR_spe_evmwhssfan
, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN
},
6131 { 0, CODE_FOR_spe_evmwhssian
, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN
},
6132 { 0, CODE_FOR_spe_evmwhsmfan
, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN
},
6133 { 0, CODE_FOR_spe_evmwhsmian
, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN
},
6134 { 0, CODE_FOR_spe_evmwhusian
, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN
},
6135 { 0, CODE_FOR_spe_evmwhumian
, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN
},
6136 { 0, CODE_FOR_spe_evmwhgssfaa
, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA
},
6137 { 0, CODE_FOR_spe_evmwhgsmfaa
, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA
},
6138 { 0, CODE_FOR_spe_evmwhgsmiaa
, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA
},
6139 { 0, CODE_FOR_spe_evmwhgumiaa
, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA
},
6140 { 0, CODE_FOR_spe_evmwhgssfan
, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN
},
6141 { 0, CODE_FOR_spe_evmwhgsmfan
, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN
},
6142 { 0, CODE_FOR_spe_evmwhgsmian
, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN
},
6143 { 0, CODE_FOR_spe_evmwhgumian
, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN
},
6144 { 0, CODE_FOR_spe_brinc
, "__builtin_spe_brinc", SPE_BUILTIN_BRINC
},
6146 /* Place-holder. Leave as last binary SPE builtin. */
6147 { 0, CODE_FOR_xorv2si3
, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR
}
6150 /* AltiVec predicates. */
/* Descriptor for one AltiVec predicate builtin: the target-flag mask that
   must be enabled, the insn pattern to emit, and the builtin's enum code.
   NOTE(review): this chunk was garbled in extraction -- the struct's
   opening/closing braces and (judging by the gap between original lines
   6155 and 6157) an "opcode" string field appear to be missing; the table
   entries below carry two strings per row.  Verify against upstream GCC
   rs6000.c before editing.  */
6152 struct builtin_description_predicates
6154 const unsigned int mask
;
6155 const enum insn_code icode
;
6157 const char *const name
;
6158 const enum rs6000_builtins code
;
/* Table of AltiVec predicate builtins (vec_all_*/vec_any_* support).
   Each row: target mask, predicate insn pattern, assembler opcode string,
   builtin name, builtin enum code.  The CODE_FOR_nothing/NULL-opcode rows
   at the end are overloaded front-end entry points resolved elsewhere.  */
6161 static const struct builtin_description_predicates bdesc_altivec_preds
[] =
6163 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P
},
6164 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P
},
6165 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P
},
6166 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P
},
6167 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P
},
6168 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P
},
6169 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P
},
6170 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P
},
6171 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P
},
6172 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P
},
6173 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P
},
6174 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P
},
6175 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P
},
/* Overloaded predicate entry points: no insn code / opcode of their own.  */
6177 { MASK_ALTIVEC
, 0, NULL
, "__builtin_vec_vcmpeq_p", ALTIVEC_BUILTIN_VCMPEQ_P
},
6178 { MASK_ALTIVEC
, 0, NULL
, "__builtin_vec_vcmpgt_p", ALTIVEC_BUILTIN_VCMPGT_P
},
6179 { MASK_ALTIVEC
, 0, NULL
, "__builtin_vec_vcmpge_p", ALTIVEC_BUILTIN_VCMPGE_P
}
6182 /* SPE predicates. */
/* Table of SPE comparison-predicate builtins.  Mask is 0 (availability is
   gated elsewhere, presumably on TARGET_SPE -- confirm upstream).  The
   comments below mark the first/last rows as order-sensitive sentinels;
   keep the ordering intact.  */
6183 static struct builtin_description bdesc_spe_predicates
[] =
6185 /* Place-holder. Leave as first. */
6186 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ
},
6187 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS
},
6188 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU
},
6189 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS
},
6190 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU
},
6191 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ
},
6192 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT
},
6193 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT
},
6194 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ
},
6195 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT
},
6196 /* Place-holder. Leave as last. */
6197 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT
},
6200 /* SPE evsel predicates. */
/* Table of SPE evsel builtins: each pairs an evsel entry point with the
   comparison insn whose result selects between the operands.  First/last
   rows are order-sensitive sentinels -- keep the ordering intact.  */
6201 static struct builtin_description bdesc_spe_evsel
[] =
6203 /* Place-holder. Leave as first. */
6204 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS
},
6205 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU
},
6206 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS
},
6207 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU
},
6208 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ
},
6209 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT
},
6210 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT
},
6211 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ
},
6212 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT
},
6213 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT
},
6214 /* Place-holder. Leave as last. */
6215 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ
},
6218 /* ABS* operations. */
/* Table of AltiVec absolute-value builtins, expanded via
   altivec_expand_abs_builtin below (the abss_* variants saturate).  */
6220 static const struct builtin_description bdesc_abs
[] =
6222 { MASK_ALTIVEC
, CODE_FOR_absv4si2
, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI
},
6223 { MASK_ALTIVEC
, CODE_FOR_absv8hi2
, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI
},
6224 { MASK_ALTIVEC
, CODE_FOR_absv4sf2
, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF
},
6225 { MASK_ALTIVEC
, CODE_FOR_absv16qi2
, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI
},
6226 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v4si
, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI
},
6227 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v8hi
, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI
},
6228 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v16qi
, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI
}
6231 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
/* Table of one-argument builtins.  Three groups, in order: concrete
   AltiVec unary insns; overloaded __builtin_vec_* entry points
   (CODE_FOR_nothing, resolved during overload resolution); then the SPE
   unary builtins, which -- per the embedded comments -- must form a
   contiguous run from SPE_BUILTIN_EVABS to SPE_BUILTIN_EVSUBFUSIAAW.
   Do not reorder.  */
6234 static struct builtin_description bdesc_1arg
[] =
6236 { MASK_ALTIVEC
, CODE_FOR_altivec_vexptefp
, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP
},
6237 { MASK_ALTIVEC
, CODE_FOR_altivec_vlogefp
, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP
},
6238 { MASK_ALTIVEC
, CODE_FOR_altivec_vrefp
, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP
},
6239 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfim
, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM
},
6240 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfin
, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN
},
6241 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfip
, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP
},
6242 { MASK_ALTIVEC
, CODE_FOR_ftruncv4sf2
, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ
},
6243 { MASK_ALTIVEC
, CODE_FOR_altivec_vrsqrtefp
, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP
},
6244 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisb
, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB
},
6245 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltish
, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH
},
6246 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisw
, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW
},
6247 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsb
, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB
},
6248 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhpx
, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX
},
6249 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsh
, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH
},
6250 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsb
, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB
},
6251 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklpx
, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX
},
6252 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsh
, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH
},
/* Overloaded entry points: no insn code of their own.  */
6254 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_abs", ALTIVEC_BUILTIN_VEC_ABS
},
6255 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_abss", ALTIVEC_BUILTIN_VEC_ABSS
},
6256 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_ceil", ALTIVEC_BUILTIN_VEC_CEIL
},
6257 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_expte", ALTIVEC_BUILTIN_VEC_EXPTE
},
6258 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR
},
6259 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE
},
6260 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR
},
6261 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE
},
6262 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND
},
6263 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE
},
6264 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC
},
6265 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH
},
6266 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH
},
6267 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX
},
6268 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vupkhsb", ALTIVEC_BUILTIN_VEC_VUPKHSB
},
6269 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_unpackl", ALTIVEC_BUILTIN_VEC_UNPACKL
},
6270 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vupklpx", ALTIVEC_BUILTIN_VEC_VUPKLPX
},
6271 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vupklsh", ALTIVEC_BUILTIN_VEC_VUPKLSH
},
6272 { MASK_ALTIVEC
, CODE_FOR_nothing
, "__builtin_vec_vupklsb", ALTIVEC_BUILTIN_VEC_VUPKLSB
},
6274 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
6275 end with SPE_BUILTIN_EVSUBFUSIAAW. */
6276 { 0, CODE_FOR_spe_evabs
, "__builtin_spe_evabs", SPE_BUILTIN_EVABS
},
6277 { 0, CODE_FOR_spe_evaddsmiaaw
, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW
},
6278 { 0, CODE_FOR_spe_evaddssiaaw
, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW
},
6279 { 0, CODE_FOR_spe_evaddumiaaw
, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW
},
6280 { 0, CODE_FOR_spe_evaddusiaaw
, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW
},
6281 { 0, CODE_FOR_spe_evcntlsw
, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW
},
6282 { 0, CODE_FOR_spe_evcntlzw
, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW
},
6283 { 0, CODE_FOR_spe_evextsb
, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB
},
6284 { 0, CODE_FOR_spe_evextsh
, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH
},
6285 { 0, CODE_FOR_spe_evfsabs
, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS
},
6286 { 0, CODE_FOR_spe_evfscfsf
, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF
},
6287 { 0, CODE_FOR_spe_evfscfsi
, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI
},
6288 { 0, CODE_FOR_spe_evfscfuf
, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF
},
6289 { 0, CODE_FOR_spe_evfscfui
, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI
},
6290 { 0, CODE_FOR_spe_evfsctsf
, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF
},
6291 { 0, CODE_FOR_spe_evfsctsi
, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI
},
6292 { 0, CODE_FOR_spe_evfsctsiz
, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ
},
6293 { 0, CODE_FOR_spe_evfsctuf
, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF
},
6294 { 0, CODE_FOR_spe_evfsctui
, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI
},
6295 { 0, CODE_FOR_spe_evfsctuiz
, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ
},
6296 { 0, CODE_FOR_spe_evfsnabs
, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS
},
6297 { 0, CODE_FOR_spe_evfsneg
, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG
},
6298 { 0, CODE_FOR_spe_evmra
, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA
},
6299 { 0, CODE_FOR_negv2si2
, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG
},
6300 { 0, CODE_FOR_spe_evrndw
, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW
},
6301 { 0, CODE_FOR_spe_evsubfsmiaaw
, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW
},
6302 { 0, CODE_FOR_spe_evsubfssiaaw
, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW
},
6303 { 0, CODE_FOR_spe_evsubfumiaaw
, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW
},
6305 /* Place-holder. Leave as last unary SPE builtin. */
6306 { 0, CODE_FOR_spe_evsubfusiaaw
, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW
}
/* Expand a one-operand builtin: evaluate ARG0 from ARGLIST, validate it,
   force TARGET/OP0 into operands acceptable to insn ICODE, and emit that
   single insn.  For the vspltis*/evsplat* immediate forms, the operand
   must be a 5-bit signed literal (-16..15) or an error is reported.
   NOTE(review): this chunk is garbled by extraction -- the "static rtx"
   return type, "rtx pat;" declaration, braces, and the early "return"
   statements (original lines ~6308-6312, 6320-6321, 6324-6325, 6338-6342,
   6351-6356, including emitting PAT and returning TARGET) are missing.
   Code left byte-identical; verify against upstream GCC rs6000.c.  */
6310 rs6000_expand_unop_builtin (enum insn_code icode
, tree arglist
, rtx target
)
6313 tree arg0
= TREE_VALUE (arglist
);
6314 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6315 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6316 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6318 if (icode
== CODE_FOR_nothing
)
6319 /* Builtin not supported on this processor. */
6322 /* If we got invalid arguments bail out before generating bad rtl. */
6323 if (arg0
== error_mark_node
)
6326 if (icode
== CODE_FOR_altivec_vspltisb
6327 || icode
== CODE_FOR_altivec_vspltish
6328 || icode
== CODE_FOR_altivec_vspltisw
6329 || icode
== CODE_FOR_spe_evsplatfi
6330 || icode
== CODE_FOR_spe_evsplati
)
6332 /* Only allow 5-bit *signed* literals. */
6333 if (GET_CODE (op0
) != CONST_INT
6334 || INTVAL (op0
) > 15
6335 || INTVAL (op0
) < -16)
6337 error ("argument 1 must be a 5-bit signed literal");
6343 || GET_MODE (target
) != tmode
6344 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6345 target
= gen_reg_rtx (tmode
);
6347 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6348 op0
= copy_to_mode_reg (mode0
, op0
);
6350 pat
= GEN_FCN (icode
) (target
, op0
);
/* Expand an AltiVec abs/abss builtin: the insn ICODE takes the input OP0
   plus two scratch registers (SCRATCH1/SCRATCH2) and writes TARGET.
   NOTE(review): extraction dropped the "static rtx" return type, braces,
   early "return" statements and the final emit/return lines (gaps at
   original lines ~6357-6360, 6369-6371, 6383-6387).  Code left
   byte-identical; verify against upstream GCC rs6000.c.  */
6359 altivec_expand_abs_builtin (enum insn_code icode
, tree arglist
, rtx target
)
6361 rtx pat
, scratch1
, scratch2
;
6362 tree arg0
= TREE_VALUE (arglist
);
6363 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6364 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6365 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6367 /* If we have invalid arguments, bail out before generating bad rtl. */
6368 if (arg0
== error_mark_node
)
6372 || GET_MODE (target
) != tmode
6373 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6374 target
= gen_reg_rtx (tmode
);
6376 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6377 op0
= copy_to_mode_reg (mode0
, op0
);
6379 scratch1
= gen_reg_rtx (mode0
);
6380 scratch2
= gen_reg_rtx (mode0
);
6382 pat
= GEN_FCN (icode
) (target
, op0
, scratch1
, scratch2
);
/* Expand a two-operand builtin: evaluate ARG0/ARG1, validate, force the
   operands to satisfy insn ICODE's predicates, and emit the insn into
   TARGET.  A long list of immediate-form insns requires ARG1 to be a
   5-bit *unsigned* literal, otherwise an error is reported.
   NOTE(review): extraction dropped the "static rtx" return type, braces,
   "rtx pat;" and the early/final "return" statements (gaps at original
   lines ~6404-6405, 6408-6409, 6441-6445, 6456-6460).  Code left
   byte-identical; verify against upstream GCC rs6000.c.  */
6391 rs6000_expand_binop_builtin (enum insn_code icode
, tree arglist
, rtx target
)
6394 tree arg0
= TREE_VALUE (arglist
);
6395 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6396 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6397 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6398 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6399 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6400 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
6402 if (icode
== CODE_FOR_nothing
)
6403 /* Builtin not supported on this processor. */
6406 /* If we got invalid arguments bail out before generating bad rtl. */
6407 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
6410 if (icode
== CODE_FOR_altivec_vcfux
6411 || icode
== CODE_FOR_altivec_vcfsx
6412 || icode
== CODE_FOR_altivec_vctsxs
6413 || icode
== CODE_FOR_altivec_vctuxs
6414 || icode
== CODE_FOR_altivec_vspltb
6415 || icode
== CODE_FOR_altivec_vsplth
6416 || icode
== CODE_FOR_altivec_vspltw
6417 || icode
== CODE_FOR_spe_evaddiw
6418 || icode
== CODE_FOR_spe_evldd
6419 || icode
== CODE_FOR_spe_evldh
6420 || icode
== CODE_FOR_spe_evldw
6421 || icode
== CODE_FOR_spe_evlhhesplat
6422 || icode
== CODE_FOR_spe_evlhhossplat
6423 || icode
== CODE_FOR_spe_evlhhousplat
6424 || icode
== CODE_FOR_spe_evlwhe
6425 || icode
== CODE_FOR_spe_evlwhos
6426 || icode
== CODE_FOR_spe_evlwhou
6427 || icode
== CODE_FOR_spe_evlwhsplat
6428 || icode
== CODE_FOR_spe_evlwwsplat
6429 || icode
== CODE_FOR_spe_evrlwi
6430 || icode
== CODE_FOR_spe_evslwi
6431 || icode
== CODE_FOR_spe_evsrwis
6432 || icode
== CODE_FOR_spe_evsubifw
6433 || icode
== CODE_FOR_spe_evsrwiu
)
6435 /* Only allow 5-bit unsigned literals. */
6437 if (TREE_CODE (arg1
) != INTEGER_CST
6438 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
6440 error ("argument 2 must be a 5-bit unsigned literal");
6446 || GET_MODE (target
) != tmode
6447 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6448 target
= gen_reg_rtx (tmode
);
6450 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6451 op0
= copy_to_mode_reg (mode0
, op0
);
6452 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
6453 op1
= copy_to_mode_reg (mode1
, op1
);
6455 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
/* Expand an AltiVec predicate builtin.  ARGLIST is (cr6_form, arg0, arg1):
   CR6_FORM (a compile-time integer constant) selects which bit pattern of
   the CR6 condition field to extract after running the comparison insn
   ICODE into a scratch register; the boolean result is placed in TARGET
   (SImode).  The SYMBOL_REF of OPCODE is passed as an extra insn operand.
   NOTE(review): extraction dropped the "static rtx" return type, braces,
   declarations (rtx pat/scratch, int cr6_form_int) and several "return"
   statements (gaps at original lines ~6466-6467, 6481-6483, 6490-6492,
   6506-6509, 6519-6536).  Code left byte-identical; verify against
   upstream GCC rs6000.c.  */
6464 altivec_expand_predicate_builtin (enum insn_code icode
, const char *opcode
,
6465 tree arglist
, rtx target
)
6468 tree cr6_form
= TREE_VALUE (arglist
);
6469 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
6470 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6471 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6472 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6473 enum machine_mode tmode
= SImode
;
6474 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6475 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
6478 if (TREE_CODE (cr6_form
) != INTEGER_CST
)
6480 error ("argument 1 of __builtin_altivec_predicate must be a constant");
6484 cr6_form_int
= TREE_INT_CST_LOW (cr6_form
);
6486 gcc_assert (mode0
== mode1
);
6488 /* If we have invalid arguments, bail out before generating bad rtl. */
6489 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
6493 || GET_MODE (target
) != tmode
6494 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6495 target
= gen_reg_rtx (tmode
);
6497 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6498 op0
= copy_to_mode_reg (mode0
, op0
);
6499 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
6500 op1
= copy_to_mode_reg (mode1
, op1
);
6502 scratch
= gen_reg_rtx (mode0
);
6504 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
,
6505 gen_rtx_SYMBOL_REF (Pmode
, opcode
));
6510 /* The vec_any* and vec_all* predicates use the same opcodes for two
6511 different operations, but the bits in CR6 will be different
6512 depending on what information we want. So we have to play tricks
6513 with CR6 to get the right bits out.
6515 If you think this is disgusting, look at the specs for the
6516 AltiVec predicates. */
6518 switch (cr6_form_int
)
6521 emit_insn (gen_cr6_test_for_zero (target
));
6524 emit_insn (gen_cr6_test_for_zero_reverse (target
));
6527 emit_insn (gen_cr6_test_for_lt (target
));
6530 emit_insn (gen_cr6_test_for_lt_reverse (target
));
6533 error ("argument 1 of __builtin_altivec_predicate is out of range");
/* Expand an AltiVec load-vector (lvx-style) builtin: build a MEM address
   from OP0 (offset) and OP1 (base pointer) -- using just OP1 when the
   offset is literal zero -- then emit insn ICODE loading into TARGET.
   NOTE(review): extraction dropped the "static rtx" return type, braces,
   "rtx pat, addr;" and the early/final "return" statements (gaps at
   original lines ~6542-6543, 6554-6555, 6558-6560, 6578-6582).  Code
   left byte-identical; verify against upstream GCC rs6000.c.  */
6541 altivec_expand_lv_builtin (enum insn_code icode
, tree arglist
, rtx target
)
6544 tree arg0
= TREE_VALUE (arglist
);
6545 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6546 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6547 enum machine_mode mode0
= Pmode
;
6548 enum machine_mode mode1
= Pmode
;
6549 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6550 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6552 if (icode
== CODE_FOR_nothing
)
6553 /* Builtin not supported on this processor. */
6556 /* If we got invalid arguments bail out before generating bad rtl. */
6557 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
6561 || GET_MODE (target
) != tmode
6562 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6563 target
= gen_reg_rtx (tmode
);
6565 op1
= copy_to_mode_reg (mode1
, op1
);
6567 if (op0
== const0_rtx
)
6569 addr
= gen_rtx_MEM (tmode
, op1
);
6573 op0
= copy_to_mode_reg (mode0
, op0
);
6574 addr
= gen_rtx_MEM (tmode
, gen_rtx_PLUS (Pmode
, op0
, op1
));
6577 pat
= GEN_FCN (icode
) (target
, addr
);
/* Expand an SPE store-vector builtin.  ARGLIST is (value, ptr, offset);
   note the operand shuffling below: OP0 (the value) is checked against
   insn operand 2, OP1 (pointer) against operand 0, OP2 (offset) against
   operand 1, and the insn is emitted as (op1, op2, op0).  Returns no
   useful value to the caller (a store has no result).
   NOTE(review): extraction dropped the "static rtx" return type, braces,
   "rtx pat;" and the "return" statements (gaps at original lines
   ~6588, 6604-6605, 6614-6617).  Code left byte-identical; verify
   against upstream GCC rs6000.c.  */
6587 spe_expand_stv_builtin (enum insn_code icode
, tree arglist
)
6589 tree arg0
= TREE_VALUE (arglist
);
6590 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6591 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6592 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6593 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6594 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
6596 enum machine_mode mode0
= insn_data
[icode
].operand
[0].mode
;
6597 enum machine_mode mode1
= insn_data
[icode
].operand
[1].mode
;
6598 enum machine_mode mode2
= insn_data
[icode
].operand
[2].mode
;
6600 /* Invalid arguments. Bail before doing anything stoopid! */
6601 if (arg0
== error_mark_node
6602 || arg1
== error_mark_node
6603 || arg2
== error_mark_node
)
6606 if (! (*insn_data
[icode
].operand
[2].predicate
) (op0
, mode2
))
6607 op0
= copy_to_mode_reg (mode2
, op0
);
6608 if (! (*insn_data
[icode
].operand
[0].predicate
) (op1
, mode0
))
6609 op1
= copy_to_mode_reg (mode0
, op1
);
6610 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
6611 op2
= copy_to_mode_reg (mode1
, op2
);
6613 pat
= GEN_FCN (icode
) (op1
, op2
, op0
);
/* Expand an AltiVec store-vector (stvx-style) builtin.  ARGLIST is
   (value, offset, ptr): the address MEM is built from OP1+OP2 (or OP2
   alone when the offset is literal zero) and insn ICODE stores OP0 there.
   NOTE(review): extraction dropped the "static rtx" return type, braces,
   "rtx pat, addr;" and the "return" statements (gaps at original lines
   ~6621, 6637-6638, 6655-6658).  Code left byte-identical; verify
   against upstream GCC rs6000.c.  */
6620 altivec_expand_stv_builtin (enum insn_code icode
, tree arglist
)
6622 tree arg0
= TREE_VALUE (arglist
);
6623 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6624 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6625 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6626 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6627 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
6629 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6630 enum machine_mode mode1
= Pmode
;
6631 enum machine_mode mode2
= Pmode
;
6633 /* Invalid arguments. Bail before doing anything stoopid! */
6634 if (arg0
== error_mark_node
6635 || arg1
== error_mark_node
6636 || arg2
== error_mark_node
)
6639 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, tmode
))
6640 op0
= copy_to_mode_reg (tmode
, op0
);
6642 op2
= copy_to_mode_reg (mode2
, op2
);
6644 if (op1
== const0_rtx
)
6646 addr
= gen_rtx_MEM (tmode
, op2
);
6650 op1
= copy_to_mode_reg (mode1
, op1
);
6651 addr
= gen_rtx_MEM (tmode
, gen_rtx_PLUS (Pmode
, op1
, op2
));
6654 pat
= GEN_FCN (icode
) (addr
, op0
);
/* Expand a three-operand builtin: evaluate ARG0..ARG2, validate, force
   the operands to satisfy insn ICODE's predicates, and emit the insn
   into TARGET.  For the vsldoi variants, ARG2 must be a 4-bit unsigned
   literal (a byte shift count), otherwise an error is reported.
   NOTE(review): extraction dropped the "static rtx" return type, braces,
   "rtx pat;" and the early/final "return" statements (gaps at original
   lines ~6662-6663, 6677-6678, 6683-6684, 6696-6700, 6713-6717).  Code
   left byte-identical; verify against upstream GCC rs6000.c.  */
6661 rs6000_expand_ternop_builtin (enum insn_code icode
, tree arglist
, rtx target
)
6664 tree arg0
= TREE_VALUE (arglist
);
6665 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6666 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6667 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6668 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6669 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
6670 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6671 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6672 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
6673 enum machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
6675 if (icode
== CODE_FOR_nothing
)
6676 /* Builtin not supported on this processor. */
6679 /* If we got invalid arguments bail out before generating bad rtl. */
6680 if (arg0
== error_mark_node
6681 || arg1
== error_mark_node
6682 || arg2
== error_mark_node
)
6685 if (icode
== CODE_FOR_altivec_vsldoi_v4sf
6686 || icode
== CODE_FOR_altivec_vsldoi_v4si
6687 || icode
== CODE_FOR_altivec_vsldoi_v8hi
6688 || icode
== CODE_FOR_altivec_vsldoi_v16qi
)
6690 /* Only allow 4-bit unsigned literals. */
6692 if (TREE_CODE (arg2
) != INTEGER_CST
6693 || TREE_INT_CST_LOW (arg2
) & ~0xf)
6695 error ("argument 3 must be a 4-bit unsigned literal");
6701 || GET_MODE (target
) != tmode
6702 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6703 target
= gen_reg_rtx (tmode
);
6705 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6706 op0
= copy_to_mode_reg (mode0
, op0
);
6707 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
6708 op1
= copy_to_mode_reg (mode1
, op1
);
6709 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
6710 op2
= copy_to_mode_reg (mode2
, op2
);
6712 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
6720 /* Expand the lvx builtins. */
6722 altivec_expand_ld_builtin (tree exp
, rtx target
, bool *expandedp
)
6724 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6725 tree arglist
= TREE_OPERAND (exp
, 1);
6726 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
6728 enum machine_mode tmode
, mode0
;
6730 enum insn_code icode
;
6734 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi
:
6735 icode
= CODE_FOR_altivec_lvx_v16qi
;
6737 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi
:
6738 icode
= CODE_FOR_altivec_lvx_v8hi
;
6740 case ALTIVEC_BUILTIN_LD_INTERNAL_4si
:
6741 icode
= CODE_FOR_altivec_lvx_v4si
;
6743 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf
:
6744 icode
= CODE_FOR_altivec_lvx_v4sf
;
6753 arg0
= TREE_VALUE (arglist
);
6754 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6755 tmode
= insn_data
[icode
].operand
[0].mode
;
6756 mode0
= insn_data
[icode
].operand
[1].mode
;
6759 || GET_MODE (target
) != tmode
6760 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6761 target
= gen_reg_rtx (tmode
);
6763 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6764 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
6766 pat
= GEN_FCN (icode
) (target
, op0
);
6773 /* Expand the stvx builtins. */
6775 altivec_expand_st_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
6778 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6779 tree arglist
= TREE_OPERAND (exp
, 1);
6780 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
6782 enum machine_mode mode0
, mode1
;
6784 enum insn_code icode
;
6788 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi
:
6789 icode
= CODE_FOR_altivec_stvx_v16qi
;
6791 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi
:
6792 icode
= CODE_FOR_altivec_stvx_v8hi
;
6794 case ALTIVEC_BUILTIN_ST_INTERNAL_4si
:
6795 icode
= CODE_FOR_altivec_stvx_v4si
;
6797 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf
:
6798 icode
= CODE_FOR_altivec_stvx_v4sf
;
6805 arg0
= TREE_VALUE (arglist
);
6806 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6807 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6808 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6809 mode0
= insn_data
[icode
].operand
[0].mode
;
6810 mode1
= insn_data
[icode
].operand
[1].mode
;
6812 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
6813 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
6814 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
6815 op1
= copy_to_mode_reg (mode1
, op1
);
6817 pat
= GEN_FCN (icode
) (op0
, op1
);
6825 /* Expand the dst builtins. */
6827 altivec_expand_dst_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
6830 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6831 tree arglist
= TREE_OPERAND (exp
, 1);
6832 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
6833 tree arg0
, arg1
, arg2
;
6834 enum machine_mode mode0
, mode1
, mode2
;
6835 rtx pat
, op0
, op1
, op2
;
6836 struct builtin_description
*d
;
6841 /* Handle DST variants. */
6842 d
= (struct builtin_description
*) bdesc_dst
;
6843 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
6844 if (d
->code
== fcode
)
6846 arg0
= TREE_VALUE (arglist
);
6847 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6848 arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6849 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6850 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6851 op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
6852 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6853 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6854 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
6856 /* Invalid arguments, bail out before generating bad rtl. */
6857 if (arg0
== error_mark_node
6858 || arg1
== error_mark_node
6859 || arg2
== error_mark_node
)
6864 if (TREE_CODE (arg2
) != INTEGER_CST
6865 || TREE_INT_CST_LOW (arg2
) & ~0x3)
6867 error ("argument to %qs must be a 2-bit unsigned literal", d
->name
);
6871 if (! (*insn_data
[d
->icode
].operand
[0].predicate
) (op0
, mode0
))
6872 op0
= copy_to_mode_reg (Pmode
, op0
);
6873 if (! (*insn_data
[d
->icode
].operand
[1].predicate
) (op1
, mode1
))
6874 op1
= copy_to_mode_reg (mode1
, op1
);
6876 pat
= GEN_FCN (d
->icode
) (op0
, op1
, op2
);
6886 /* Expand the builtin in EXP and store the result in TARGET. Store
6887 true in *EXPANDEDP if we found a builtin to expand. */
6889 altivec_expand_builtin (tree exp
, rtx target
, bool *expandedp
)
6891 struct builtin_description
*d
;
6892 struct builtin_description_predicates
*dp
;
6894 enum insn_code icode
;
6895 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6896 tree arglist
= TREE_OPERAND (exp
, 1);
6899 enum machine_mode tmode
, mode0
;
6900 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
6902 if (fcode
>= ALTIVEC_BUILTIN_OVERLOADED_FIRST
6903 && fcode
<= ALTIVEC_BUILTIN_OVERLOADED_LAST
)
6906 error ("unresolved overload for Altivec builtin %qF", fndecl
);
6910 target
= altivec_expand_ld_builtin (exp
, target
, expandedp
);
6914 target
= altivec_expand_st_builtin (exp
, target
, expandedp
);
6918 target
= altivec_expand_dst_builtin (exp
, target
, expandedp
);
6926 case ALTIVEC_BUILTIN_STVX
:
6927 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx
, arglist
);
6928 case ALTIVEC_BUILTIN_STVEBX
:
6929 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx
, arglist
);
6930 case ALTIVEC_BUILTIN_STVEHX
:
6931 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx
, arglist
);
6932 case ALTIVEC_BUILTIN_STVEWX
:
6933 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx
, arglist
);
6934 case ALTIVEC_BUILTIN_STVXL
:
6935 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl
, arglist
);
6937 case ALTIVEC_BUILTIN_MFVSCR
:
6938 icode
= CODE_FOR_altivec_mfvscr
;
6939 tmode
= insn_data
[icode
].operand
[0].mode
;
6942 || GET_MODE (target
) != tmode
6943 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6944 target
= gen_reg_rtx (tmode
);
6946 pat
= GEN_FCN (icode
) (target
);
6952 case ALTIVEC_BUILTIN_MTVSCR
:
6953 icode
= CODE_FOR_altivec_mtvscr
;
6954 arg0
= TREE_VALUE (arglist
);
6955 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6956 mode0
= insn_data
[icode
].operand
[0].mode
;
6958 /* If we got invalid arguments bail out before generating bad rtl. */
6959 if (arg0
== error_mark_node
)
6962 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
6963 op0
= copy_to_mode_reg (mode0
, op0
);
6965 pat
= GEN_FCN (icode
) (op0
);
6970 case ALTIVEC_BUILTIN_DSSALL
:
6971 emit_insn (gen_altivec_dssall ());
6974 case ALTIVEC_BUILTIN_DSS
:
6975 icode
= CODE_FOR_altivec_dss
;
6976 arg0
= TREE_VALUE (arglist
);
6978 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6979 mode0
= insn_data
[icode
].operand
[0].mode
;
6981 /* If we got invalid arguments bail out before generating bad rtl. */
6982 if (arg0
== error_mark_node
)
6985 if (TREE_CODE (arg0
) != INTEGER_CST
6986 || TREE_INT_CST_LOW (arg0
) & ~0x3)
6988 error ("argument to dss must be a 2-bit unsigned literal");
6992 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
6993 op0
= copy_to_mode_reg (mode0
, op0
);
6995 emit_insn (gen_altivec_dss (op0
));
6999 /* Expand abs* operations. */
7000 d
= (struct builtin_description
*) bdesc_abs
;
7001 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
7002 if (d
->code
== fcode
)
7003 return altivec_expand_abs_builtin (d
->icode
, arglist
, target
);
7005 /* Expand the AltiVec predicates. */
7006 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
7007 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
7008 if (dp
->code
== fcode
)
7009 return altivec_expand_predicate_builtin (dp
->icode
, dp
->opcode
,
7012 /* LV* are funky. We initialized them differently. */
7015 case ALTIVEC_BUILTIN_LVSL
:
7016 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl
,
7018 case ALTIVEC_BUILTIN_LVSR
:
7019 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr
,
7021 case ALTIVEC_BUILTIN_LVEBX
:
7022 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx
,
7024 case ALTIVEC_BUILTIN_LVEHX
:
7025 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx
,
7027 case ALTIVEC_BUILTIN_LVEWX
:
7028 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx
,
7030 case ALTIVEC_BUILTIN_LVXL
:
7031 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl
,
7033 case ALTIVEC_BUILTIN_LVX
:
7034 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx
,
7045 /* Binops that need to be initialized manually, but can be expanded
7046 automagically by rs6000_expand_binop_builtin. */
7047 static struct builtin_description bdesc_2arg_spe
[] =
7049 { 0, CODE_FOR_spe_evlddx
, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX
},
7050 { 0, CODE_FOR_spe_evldwx
, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX
},
7051 { 0, CODE_FOR_spe_evldhx
, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX
},
7052 { 0, CODE_FOR_spe_evlwhex
, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX
},
7053 { 0, CODE_FOR_spe_evlwhoux
, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX
},
7054 { 0, CODE_FOR_spe_evlwhosx
, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX
},
7055 { 0, CODE_FOR_spe_evlwwsplatx
, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX
},
7056 { 0, CODE_FOR_spe_evlwhsplatx
, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX
},
7057 { 0, CODE_FOR_spe_evlhhesplatx
, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX
},
7058 { 0, CODE_FOR_spe_evlhhousplatx
, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX
},
7059 { 0, CODE_FOR_spe_evlhhossplatx
, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX
},
7060 { 0, CODE_FOR_spe_evldd
, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD
},
7061 { 0, CODE_FOR_spe_evldw
, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW
},
7062 { 0, CODE_FOR_spe_evldh
, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH
},
7063 { 0, CODE_FOR_spe_evlwhe
, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE
},
7064 { 0, CODE_FOR_spe_evlwhou
, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU
},
7065 { 0, CODE_FOR_spe_evlwhos
, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS
},
7066 { 0, CODE_FOR_spe_evlwwsplat
, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT
},
7067 { 0, CODE_FOR_spe_evlwhsplat
, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT
},
7068 { 0, CODE_FOR_spe_evlhhesplat
, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT
},
7069 { 0, CODE_FOR_spe_evlhhousplat
, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT
},
7070 { 0, CODE_FOR_spe_evlhhossplat
, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT
}
7073 /* Expand the builtin in EXP and store the result in TARGET. Store
7074 true in *EXPANDEDP if we found a builtin to expand.
7076 This expands the SPE builtins that are not simple unary and binary
7079 spe_expand_builtin (tree exp
, rtx target
, bool *expandedp
)
7081 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7082 tree arglist
= TREE_OPERAND (exp
, 1);
7084 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
7085 enum insn_code icode
;
7086 enum machine_mode tmode
, mode0
;
7088 struct builtin_description
*d
;
7093 /* Syntax check for a 5-bit unsigned immediate. */
7096 case SPE_BUILTIN_EVSTDD
:
7097 case SPE_BUILTIN_EVSTDH
:
7098 case SPE_BUILTIN_EVSTDW
:
7099 case SPE_BUILTIN_EVSTWHE
:
7100 case SPE_BUILTIN_EVSTWHO
:
7101 case SPE_BUILTIN_EVSTWWE
:
7102 case SPE_BUILTIN_EVSTWWO
:
7103 arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
7104 if (TREE_CODE (arg1
) != INTEGER_CST
7105 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
7107 error ("argument 2 must be a 5-bit unsigned literal");
7115 /* The evsplat*i instructions are not quite generic. */
7118 case SPE_BUILTIN_EVSPLATFI
:
7119 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi
,
7121 case SPE_BUILTIN_EVSPLATI
:
7122 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati
,
7128 d
= (struct builtin_description
*) bdesc_2arg_spe
;
7129 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg_spe
); ++i
, ++d
)
7130 if (d
->code
== fcode
)
7131 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
7133 d
= (struct builtin_description
*) bdesc_spe_predicates
;
7134 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, ++d
)
7135 if (d
->code
== fcode
)
7136 return spe_expand_predicate_builtin (d
->icode
, arglist
, target
);
7138 d
= (struct builtin_description
*) bdesc_spe_evsel
;
7139 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, ++d
)
7140 if (d
->code
== fcode
)
7141 return spe_expand_evsel_builtin (d
->icode
, arglist
, target
);
7145 case SPE_BUILTIN_EVSTDDX
:
7146 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx
, arglist
);
7147 case SPE_BUILTIN_EVSTDHX
:
7148 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx
, arglist
);
7149 case SPE_BUILTIN_EVSTDWX
:
7150 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx
, arglist
);
7151 case SPE_BUILTIN_EVSTWHEX
:
7152 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex
, arglist
);
7153 case SPE_BUILTIN_EVSTWHOX
:
7154 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox
, arglist
);
7155 case SPE_BUILTIN_EVSTWWEX
:
7156 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex
, arglist
);
7157 case SPE_BUILTIN_EVSTWWOX
:
7158 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox
, arglist
);
7159 case SPE_BUILTIN_EVSTDD
:
7160 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd
, arglist
);
7161 case SPE_BUILTIN_EVSTDH
:
7162 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh
, arglist
);
7163 case SPE_BUILTIN_EVSTDW
:
7164 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw
, arglist
);
7165 case SPE_BUILTIN_EVSTWHE
:
7166 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe
, arglist
);
7167 case SPE_BUILTIN_EVSTWHO
:
7168 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho
, arglist
);
7169 case SPE_BUILTIN_EVSTWWE
:
7170 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe
, arglist
);
7171 case SPE_BUILTIN_EVSTWWO
:
7172 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo
, arglist
);
7173 case SPE_BUILTIN_MFSPEFSCR
:
7174 icode
= CODE_FOR_spe_mfspefscr
;
7175 tmode
= insn_data
[icode
].operand
[0].mode
;
7178 || GET_MODE (target
) != tmode
7179 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
7180 target
= gen_reg_rtx (tmode
);
7182 pat
= GEN_FCN (icode
) (target
);
7187 case SPE_BUILTIN_MTSPEFSCR
:
7188 icode
= CODE_FOR_spe_mtspefscr
;
7189 arg0
= TREE_VALUE (arglist
);
7190 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
7191 mode0
= insn_data
[icode
].operand
[0].mode
;
7193 if (arg0
== error_mark_node
)
7196 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
7197 op0
= copy_to_mode_reg (mode0
, op0
);
7199 pat
= GEN_FCN (icode
) (op0
);
7212 spe_expand_predicate_builtin (enum insn_code icode
, tree arglist
, rtx target
)
7214 rtx pat
, scratch
, tmp
;
7215 tree form
= TREE_VALUE (arglist
);
7216 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
7217 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
7218 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
7219 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
7220 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
7221 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
7225 if (TREE_CODE (form
) != INTEGER_CST
)
7227 error ("argument 1 of __builtin_spe_predicate must be a constant");
7231 form_int
= TREE_INT_CST_LOW (form
);
7233 gcc_assert (mode0
== mode1
);
7235 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
7239 || GET_MODE (target
) != SImode
7240 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, SImode
))
7241 target
= gen_reg_rtx (SImode
);
7243 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
7244 op0
= copy_to_mode_reg (mode0
, op0
);
7245 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
7246 op1
= copy_to_mode_reg (mode1
, op1
);
7248 scratch
= gen_reg_rtx (CCmode
);
7250 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
7255 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
7256 _lower_. We use one compare, but look in different bits of the
7257 CR for each variant.
7259 There are 2 elements in each SPE simd type (upper/lower). The CR
7260 bits are set as follows:
7262 BIT0 | BIT 1 | BIT 2 | BIT 3
7263 U | L | (U | L) | (U & L)
7265 So, for an "all" relationship, BIT 3 would be set.
7266 For an "any" relationship, BIT 2 would be set. Etc.
7268 Following traditional nomenclature, these bits map to:
7270 BIT0 | BIT 1 | BIT 2 | BIT 3
7273 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
7278 /* All variant. OV bit. */
7280 /* We need to get to the OV bit, which is the ORDERED bit. We
7281 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
7282 that's ugly and will make validate_condition_mode die.
7283 So let's just use another pattern. */
7284 emit_insn (gen_move_from_CR_ov_bit (target
, scratch
));
7286 /* Any variant. EQ bit. */
7290 /* Upper variant. LT bit. */
7294 /* Lower variant. GT bit. */
7299 error ("argument 1 of __builtin_spe_predicate is out of range");
7303 tmp
= gen_rtx_fmt_ee (code
, SImode
, scratch
, const0_rtx
);
7304 emit_move_insn (target
, tmp
);
7309 /* The evsel builtins look like this:
7311 e = __builtin_spe_evsel_OP (a, b, c, d);
7315 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
7316 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
7320 spe_expand_evsel_builtin (enum insn_code icode
, tree arglist
, rtx target
)
7323 tree arg0
= TREE_VALUE (arglist
);
7324 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
7325 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
7326 tree arg3
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist
))));
7327 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
7328 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
7329 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
7330 rtx op3
= expand_expr (arg3
, NULL_RTX
, VOIDmode
, 0);
7331 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
7332 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
7334 gcc_assert (mode0
== mode1
);
7336 if (arg0
== error_mark_node
|| arg1
== error_mark_node
7337 || arg2
== error_mark_node
|| arg3
== error_mark_node
)
7341 || GET_MODE (target
) != mode0
7342 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, mode0
))
7343 target
= gen_reg_rtx (mode0
);
7345 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
7346 op0
= copy_to_mode_reg (mode0
, op0
);
7347 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
7348 op1
= copy_to_mode_reg (mode0
, op1
);
7349 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
7350 op2
= copy_to_mode_reg (mode0
, op2
);
7351 if (! (*insn_data
[icode
].operand
[1].predicate
) (op3
, mode1
))
7352 op3
= copy_to_mode_reg (mode0
, op3
);
7354 /* Generate the compare. */
7355 scratch
= gen_reg_rtx (CCmode
);
7356 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
7361 if (mode0
== V2SImode
)
7362 emit_insn (gen_spe_evsel (target
, op2
, op3
, scratch
));
7364 emit_insn (gen_spe_evsel_fs (target
, op2
, op3
, scratch
));
7369 /* Expand an expression EXP that calls a built-in function,
7370 with result going to TARGET if that's convenient
7371 (and in mode MODE if that's convenient).
7372 SUBTARGET may be used as the target for computing one of EXP's operands.
7373 IGNORE is nonzero if the value is to be ignored. */
7376 rs6000_expand_builtin (tree exp
, rtx target
, rtx subtarget ATTRIBUTE_UNUSED
,
7377 enum machine_mode mode ATTRIBUTE_UNUSED
,
7378 int ignore ATTRIBUTE_UNUSED
)
7380 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7381 tree arglist
= TREE_OPERAND (exp
, 1);
7382 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
7383 struct builtin_description
*d
;
7388 if (fcode
== ALTIVEC_BUILTIN_MASK_FOR_LOAD
7389 || fcode
== ALTIVEC_BUILTIN_MASK_FOR_STORE
)
7391 int icode
= (int) CODE_FOR_altivec_lvsr
;
7392 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
7393 enum machine_mode mode
= insn_data
[icode
].operand
[1].mode
;
7397 gcc_assert (TARGET_ALTIVEC
);
7399 arg
= TREE_VALUE (arglist
);
7400 gcc_assert (TREE_CODE (TREE_TYPE (arg
)) == POINTER_TYPE
);
7401 op
= expand_expr (arg
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
7402 addr
= memory_address (mode
, op
);
7403 if (fcode
== ALTIVEC_BUILTIN_MASK_FOR_STORE
)
7407 /* For the load case need to negate the address. */
7408 op
= gen_reg_rtx (GET_MODE (addr
));
7409 emit_insn (gen_rtx_SET (VOIDmode
, op
,
7410 gen_rtx_NEG (GET_MODE (addr
), addr
)));
7412 op
= gen_rtx_MEM (mode
, op
);
7415 || GET_MODE (target
) != tmode
7416 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
7417 target
= gen_reg_rtx (tmode
);
7419 /*pat = gen_altivec_lvsr (target, op);*/
7420 pat
= GEN_FCN (icode
) (target
, op
);
7430 ret
= altivec_expand_builtin (exp
, target
, &success
);
7437 ret
= spe_expand_builtin (exp
, target
, &success
);
7443 gcc_assert (TARGET_ALTIVEC
|| TARGET_SPE
);
7445 /* Handle simple unary operations. */
7446 d
= (struct builtin_description
*) bdesc_1arg
;
7447 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
7448 if (d
->code
== fcode
)
7449 return rs6000_expand_unop_builtin (d
->icode
, arglist
, target
);
7451 /* Handle simple binary operations. */
7452 d
= (struct builtin_description
*) bdesc_2arg
;
7453 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
7454 if (d
->code
== fcode
)
7455 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
7457 /* Handle simple ternary operations. */
7458 d
= (struct builtin_description
*) bdesc_3arg
;
7459 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
7460 if (d
->code
== fcode
)
7461 return rs6000_expand_ternop_builtin (d
->icode
, arglist
, target
);
7467 build_opaque_vector_type (tree node
, int nunits
)
7469 node
= copy_node (node
);
7470 TYPE_MAIN_VARIANT (node
) = node
;
7471 return build_vector_type (node
, nunits
);
7475 rs6000_init_builtins (void)
7477 V2SI_type_node
= build_vector_type (intSI_type_node
, 2);
7478 V2SF_type_node
= build_vector_type (float_type_node
, 2);
7479 V4HI_type_node
= build_vector_type (intHI_type_node
, 4);
7480 V4SI_type_node
= build_vector_type (intSI_type_node
, 4);
7481 V4SF_type_node
= build_vector_type (float_type_node
, 4);
7482 V8HI_type_node
= build_vector_type (intHI_type_node
, 8);
7483 V16QI_type_node
= build_vector_type (intQI_type_node
, 16);
7485 unsigned_V16QI_type_node
= build_vector_type (unsigned_intQI_type_node
, 16);
7486 unsigned_V8HI_type_node
= build_vector_type (unsigned_intHI_type_node
, 8);
7487 unsigned_V4SI_type_node
= build_vector_type (unsigned_intSI_type_node
, 4);
7489 opaque_V2SF_type_node
= build_opaque_vector_type (float_type_node
, 2);
7490 opaque_V2SI_type_node
= build_opaque_vector_type (intSI_type_node
, 2);
7491 opaque_p_V2SI_type_node
= build_pointer_type (opaque_V2SI_type_node
);
7492 opaque_V4SI_type_node
= copy_node (V4SI_type_node
);
7494 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
7495 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
7496 'vector unsigned short'. */
7498 bool_char_type_node
= build_distinct_type_copy (unsigned_intQI_type_node
);
7499 bool_short_type_node
= build_distinct_type_copy (unsigned_intHI_type_node
);
7500 bool_int_type_node
= build_distinct_type_copy (unsigned_intSI_type_node
);
7501 pixel_type_node
= build_distinct_type_copy (unsigned_intHI_type_node
);
7503 long_integer_type_internal_node
= long_integer_type_node
;
7504 long_unsigned_type_internal_node
= long_unsigned_type_node
;
7505 intQI_type_internal_node
= intQI_type_node
;
7506 uintQI_type_internal_node
= unsigned_intQI_type_node
;
7507 intHI_type_internal_node
= intHI_type_node
;
7508 uintHI_type_internal_node
= unsigned_intHI_type_node
;
7509 intSI_type_internal_node
= intSI_type_node
;
7510 uintSI_type_internal_node
= unsigned_intSI_type_node
;
7511 float_type_internal_node
= float_type_node
;
7512 void_type_internal_node
= void_type_node
;
7514 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7515 get_identifier ("__bool char"),
7516 bool_char_type_node
));
7517 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7518 get_identifier ("__bool short"),
7519 bool_short_type_node
));
7520 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7521 get_identifier ("__bool int"),
7522 bool_int_type_node
));
7523 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7524 get_identifier ("__pixel"),
7527 bool_V16QI_type_node
= build_vector_type (bool_char_type_node
, 16);
7528 bool_V8HI_type_node
= build_vector_type (bool_short_type_node
, 8);
7529 bool_V4SI_type_node
= build_vector_type (bool_int_type_node
, 4);
7530 pixel_V8HI_type_node
= build_vector_type (pixel_type_node
, 8);
7532 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7533 get_identifier ("__vector unsigned char"),
7534 unsigned_V16QI_type_node
));
7535 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7536 get_identifier ("__vector signed char"),
7538 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7539 get_identifier ("__vector __bool char"),
7540 bool_V16QI_type_node
));
7542 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7543 get_identifier ("__vector unsigned short"),
7544 unsigned_V8HI_type_node
));
7545 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7546 get_identifier ("__vector signed short"),
7548 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7549 get_identifier ("__vector __bool short"),
7550 bool_V8HI_type_node
));
7552 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7553 get_identifier ("__vector unsigned int"),
7554 unsigned_V4SI_type_node
));
7555 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7556 get_identifier ("__vector signed int"),
7558 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7559 get_identifier ("__vector __bool int"),
7560 bool_V4SI_type_node
));
7562 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7563 get_identifier ("__vector float"),
7565 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7566 get_identifier ("__vector __pixel"),
7567 pixel_V8HI_type_node
));
7570 spe_init_builtins ();
7572 altivec_init_builtins ();
7573 if (TARGET_ALTIVEC
|| TARGET_SPE
)
7574 rs6000_common_init_builtins ();
7577 /* Search through a set of builtins and enable the mask bits.
7578 DESC is an array of builtins.
7579 SIZE is the total number of builtins.
7580 START is the builtin enum at which to start.
7581 END is the builtin enum at which to end. */
7583 enable_mask_for_builtins (struct builtin_description
*desc
, int size
,
7584 enum rs6000_builtins start
,
7585 enum rs6000_builtins end
)
7589 for (i
= 0; i
< size
; ++i
)
7590 if (desc
[i
].code
== start
)
7596 for (; i
< size
; ++i
)
7598 /* Flip all the bits on. */
7599 desc
[i
].mask
= target_flags
;
7600 if (desc
[i
].code
== end
)
7606 spe_init_builtins (void)
7608 tree endlink
= void_list_node
;
7609 tree puint_type_node
= build_pointer_type (unsigned_type_node
);
7610 tree pushort_type_node
= build_pointer_type (short_unsigned_type_node
);
7611 struct builtin_description
*d
;
7614 tree v2si_ftype_4_v2si
7615 = build_function_type
7616 (opaque_V2SI_type_node
,
7617 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7618 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7619 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7620 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7623 tree v2sf_ftype_4_v2sf
7624 = build_function_type
7625 (opaque_V2SF_type_node
,
7626 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
7627 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
7628 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
7629 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
7632 tree int_ftype_int_v2si_v2si
7633 = build_function_type
7635 tree_cons (NULL_TREE
, integer_type_node
,
7636 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7637 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7640 tree int_ftype_int_v2sf_v2sf
7641 = build_function_type
7643 tree_cons (NULL_TREE
, integer_type_node
,
7644 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
7645 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
7648 tree void_ftype_v2si_puint_int
7649 = build_function_type (void_type_node
,
7650 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7651 tree_cons (NULL_TREE
, puint_type_node
,
7652 tree_cons (NULL_TREE
,
7656 tree void_ftype_v2si_puint_char
7657 = build_function_type (void_type_node
,
7658 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7659 tree_cons (NULL_TREE
, puint_type_node
,
7660 tree_cons (NULL_TREE
,
7664 tree void_ftype_v2si_pv2si_int
7665 = build_function_type (void_type_node
,
7666 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7667 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
7668 tree_cons (NULL_TREE
,
7672 tree void_ftype_v2si_pv2si_char
7673 = build_function_type (void_type_node
,
7674 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7675 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
7676 tree_cons (NULL_TREE
,
7681 = build_function_type (void_type_node
,
7682 tree_cons (NULL_TREE
, integer_type_node
, endlink
));
7685 = build_function_type (integer_type_node
, endlink
);
7687 tree v2si_ftype_pv2si_int
7688 = build_function_type (opaque_V2SI_type_node
,
7689 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
7690 tree_cons (NULL_TREE
, integer_type_node
,
7693 tree v2si_ftype_puint_int
7694 = build_function_type (opaque_V2SI_type_node
,
7695 tree_cons (NULL_TREE
, puint_type_node
,
7696 tree_cons (NULL_TREE
, integer_type_node
,
7699 tree v2si_ftype_pushort_int
7700 = build_function_type (opaque_V2SI_type_node
,
7701 tree_cons (NULL_TREE
, pushort_type_node
,
7702 tree_cons (NULL_TREE
, integer_type_node
,
7705 tree v2si_ftype_signed_char
7706 = build_function_type (opaque_V2SI_type_node
,
7707 tree_cons (NULL_TREE
, signed_char_type_node
,
7710 /* The initialization of the simple binary and unary builtins is
7711 done in rs6000_common_init_builtins, but we have to enable the
7712 mask bits here manually because we have run out of `target_flags'
7713 bits. We really need to redesign this mask business. */
7715 enable_mask_for_builtins ((struct builtin_description
*) bdesc_2arg
,
7716 ARRAY_SIZE (bdesc_2arg
),
7719 enable_mask_for_builtins ((struct builtin_description
*) bdesc_1arg
,
7720 ARRAY_SIZE (bdesc_1arg
),
7722 SPE_BUILTIN_EVSUBFUSIAAW
);
7723 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_predicates
,
7724 ARRAY_SIZE (bdesc_spe_predicates
),
7725 SPE_BUILTIN_EVCMPEQ
,
7726 SPE_BUILTIN_EVFSTSTLT
);
7727 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_evsel
,
7728 ARRAY_SIZE (bdesc_spe_evsel
),
7729 SPE_BUILTIN_EVSEL_CMPGTS
,
7730 SPE_BUILTIN_EVSEL_FSTSTEQ
);
7732 (*lang_hooks
.decls
.pushdecl
)
7733 (build_decl (TYPE_DECL
, get_identifier ("__ev64_opaque__"),
7734 opaque_V2SI_type_node
));
7736 /* Initialize irregular SPE builtins. */
7738 def_builtin (target_flags
, "__builtin_spe_mtspefscr", void_ftype_int
, SPE_BUILTIN_MTSPEFSCR
);
7739 def_builtin (target_flags
, "__builtin_spe_mfspefscr", int_ftype_void
, SPE_BUILTIN_MFSPEFSCR
);
7740 def_builtin (target_flags
, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDDX
);
7741 def_builtin (target_flags
, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDHX
);
7742 def_builtin (target_flags
, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDWX
);
7743 def_builtin (target_flags
, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHEX
);
7744 def_builtin (target_flags
, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHOX
);
7745 def_builtin (target_flags
, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWEX
);
7746 def_builtin (target_flags
, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWOX
);
7747 def_builtin (target_flags
, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDD
);
7748 def_builtin (target_flags
, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDH
);
7749 def_builtin (target_flags
, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDW
);
7750 def_builtin (target_flags
, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHE
);
7751 def_builtin (target_flags
, "__builtin_spe_evstwho", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHO
);
7752 def_builtin (target_flags
, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWE
);
7753 def_builtin (target_flags
, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWO
);
7754 def_builtin (target_flags
, "__builtin_spe_evsplatfi", v2si_ftype_signed_char
, SPE_BUILTIN_EVSPLATFI
);
7755 def_builtin (target_flags
, "__builtin_spe_evsplati", v2si_ftype_signed_char
, SPE_BUILTIN_EVSPLATI
);
7758 def_builtin (target_flags
, "__builtin_spe_evlddx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDDX
);
7759 def_builtin (target_flags
, "__builtin_spe_evldwx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDWX
);
7760 def_builtin (target_flags
, "__builtin_spe_evldhx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDHX
);
7761 def_builtin (target_flags
, "__builtin_spe_evlwhex", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHEX
);
7762 def_builtin (target_flags
, "__builtin_spe_evlwhoux", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOUX
);
7763 def_builtin (target_flags
, "__builtin_spe_evlwhosx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOSX
);
7764 def_builtin (target_flags
, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLATX
);
7765 def_builtin (target_flags
, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLATX
);
7766 def_builtin (target_flags
, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLATX
);
7767 def_builtin (target_flags
, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLATX
);
7768 def_builtin (target_flags
, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLATX
);
7769 def_builtin (target_flags
, "__builtin_spe_evldd", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDD
);
7770 def_builtin (target_flags
, "__builtin_spe_evldw", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDW
);
7771 def_builtin (target_flags
, "__builtin_spe_evldh", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDH
);
7772 def_builtin (target_flags
, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLAT
);
7773 def_builtin (target_flags
, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLAT
);
7774 def_builtin (target_flags
, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLAT
);
7775 def_builtin (target_flags
, "__builtin_spe_evlwhe", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHE
);
7776 def_builtin (target_flags
, "__builtin_spe_evlwhos", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOS
);
7777 def_builtin (target_flags
, "__builtin_spe_evlwhou", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOU
);
7778 def_builtin (target_flags
, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLAT
);
7779 def_builtin (target_flags
, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLAT
);
7782 d
= (struct builtin_description
*) bdesc_spe_predicates
;
7783 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, d
++)
7787 switch (insn_data
[d
->icode
].operand
[1].mode
)
7790 type
= int_ftype_int_v2si_v2si
;
7793 type
= int_ftype_int_v2sf_v2sf
;
7799 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
7802 /* Evsel predicates. */
7803 d
= (struct builtin_description
*) bdesc_spe_evsel
;
7804 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, d
++)
7808 switch (insn_data
[d
->icode
].operand
[1].mode
)
7811 type
= v2si_ftype_4_v2si
;
7814 type
= v2sf_ftype_4_v2sf
;
7820 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
7825 altivec_init_builtins (void)
7827 struct builtin_description
*d
;
7828 struct builtin_description_predicates
*dp
;
7830 tree pfloat_type_node
= build_pointer_type (float_type_node
);
7831 tree pint_type_node
= build_pointer_type (integer_type_node
);
7832 tree pshort_type_node
= build_pointer_type (short_integer_type_node
);
7833 tree pchar_type_node
= build_pointer_type (char_type_node
);
7835 tree pvoid_type_node
= build_pointer_type (void_type_node
);
7837 tree pcfloat_type_node
= build_pointer_type (build_qualified_type (float_type_node
, TYPE_QUAL_CONST
));
7838 tree pcint_type_node
= build_pointer_type (build_qualified_type (integer_type_node
, TYPE_QUAL_CONST
));
7839 tree pcshort_type_node
= build_pointer_type (build_qualified_type (short_integer_type_node
, TYPE_QUAL_CONST
));
7840 tree pcchar_type_node
= build_pointer_type (build_qualified_type (char_type_node
, TYPE_QUAL_CONST
));
7842 tree pcvoid_type_node
= build_pointer_type (build_qualified_type (void_type_node
, TYPE_QUAL_CONST
));
7844 tree int_ftype_opaque
7845 = build_function_type_list (integer_type_node
,
7846 opaque_V4SI_type_node
, NULL_TREE
);
7848 tree opaque_ftype_opaque_int
7849 = build_function_type_list (opaque_V4SI_type_node
,
7850 opaque_V4SI_type_node
, integer_type_node
, NULL_TREE
);
7851 tree opaque_ftype_opaque_opaque_int
7852 = build_function_type_list (opaque_V4SI_type_node
,
7853 opaque_V4SI_type_node
, opaque_V4SI_type_node
,
7854 integer_type_node
, NULL_TREE
);
7855 tree int_ftype_int_opaque_opaque
7856 = build_function_type_list (integer_type_node
,
7857 integer_type_node
, opaque_V4SI_type_node
,
7858 opaque_V4SI_type_node
, NULL_TREE
);
7859 tree int_ftype_int_v4si_v4si
7860 = build_function_type_list (integer_type_node
,
7861 integer_type_node
, V4SI_type_node
,
7862 V4SI_type_node
, NULL_TREE
);
7863 tree v4sf_ftype_pcfloat
7864 = build_function_type_list (V4SF_type_node
, pcfloat_type_node
, NULL_TREE
);
7865 tree void_ftype_pfloat_v4sf
7866 = build_function_type_list (void_type_node
,
7867 pfloat_type_node
, V4SF_type_node
, NULL_TREE
);
7868 tree v4si_ftype_pcint
7869 = build_function_type_list (V4SI_type_node
, pcint_type_node
, NULL_TREE
);
7870 tree void_ftype_pint_v4si
7871 = build_function_type_list (void_type_node
,
7872 pint_type_node
, V4SI_type_node
, NULL_TREE
);
7873 tree v8hi_ftype_pcshort
7874 = build_function_type_list (V8HI_type_node
, pcshort_type_node
, NULL_TREE
);
7875 tree void_ftype_pshort_v8hi
7876 = build_function_type_list (void_type_node
,
7877 pshort_type_node
, V8HI_type_node
, NULL_TREE
);
7878 tree v16qi_ftype_pcchar
7879 = build_function_type_list (V16QI_type_node
, pcchar_type_node
, NULL_TREE
);
7880 tree void_ftype_pchar_v16qi
7881 = build_function_type_list (void_type_node
,
7882 pchar_type_node
, V16QI_type_node
, NULL_TREE
);
7883 tree void_ftype_v4si
7884 = build_function_type_list (void_type_node
, V4SI_type_node
, NULL_TREE
);
7885 tree v8hi_ftype_void
7886 = build_function_type (V8HI_type_node
, void_list_node
);
7887 tree void_ftype_void
7888 = build_function_type (void_type_node
, void_list_node
);
7890 = build_function_type_list (void_type_node
, integer_type_node
, NULL_TREE
);
7892 tree opaque_ftype_long_pcvoid
7893 = build_function_type_list (opaque_V4SI_type_node
,
7894 long_integer_type_node
, pcvoid_type_node
, NULL_TREE
);
7895 tree v16qi_ftype_long_pcvoid
7896 = build_function_type_list (V16QI_type_node
,
7897 long_integer_type_node
, pcvoid_type_node
, NULL_TREE
);
7898 tree v8hi_ftype_long_pcvoid
7899 = build_function_type_list (V8HI_type_node
,
7900 long_integer_type_node
, pcvoid_type_node
, NULL_TREE
);
7901 tree v4si_ftype_long_pcvoid
7902 = build_function_type_list (V4SI_type_node
,
7903 long_integer_type_node
, pcvoid_type_node
, NULL_TREE
);
7905 tree void_ftype_opaque_long_pvoid
7906 = build_function_type_list (void_type_node
,
7907 opaque_V4SI_type_node
, long_integer_type_node
,
7908 pvoid_type_node
, NULL_TREE
);
7909 tree void_ftype_v4si_long_pvoid
7910 = build_function_type_list (void_type_node
,
7911 V4SI_type_node
, long_integer_type_node
,
7912 pvoid_type_node
, NULL_TREE
);
7913 tree void_ftype_v16qi_long_pvoid
7914 = build_function_type_list (void_type_node
,
7915 V16QI_type_node
, long_integer_type_node
,
7916 pvoid_type_node
, NULL_TREE
);
7917 tree void_ftype_v8hi_long_pvoid
7918 = build_function_type_list (void_type_node
,
7919 V8HI_type_node
, long_integer_type_node
,
7920 pvoid_type_node
, NULL_TREE
);
7921 tree int_ftype_int_v8hi_v8hi
7922 = build_function_type_list (integer_type_node
,
7923 integer_type_node
, V8HI_type_node
,
7924 V8HI_type_node
, NULL_TREE
);
7925 tree int_ftype_int_v16qi_v16qi
7926 = build_function_type_list (integer_type_node
,
7927 integer_type_node
, V16QI_type_node
,
7928 V16QI_type_node
, NULL_TREE
);
7929 tree int_ftype_int_v4sf_v4sf
7930 = build_function_type_list (integer_type_node
,
7931 integer_type_node
, V4SF_type_node
,
7932 V4SF_type_node
, NULL_TREE
);
7933 tree v4si_ftype_v4si
7934 = build_function_type_list (V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
7935 tree v8hi_ftype_v8hi
7936 = build_function_type_list (V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
7937 tree v16qi_ftype_v16qi
7938 = build_function_type_list (V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
7939 tree v4sf_ftype_v4sf
7940 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
7941 tree void_ftype_pcvoid_int_int
7942 = build_function_type_list (void_type_node
,
7943 pcvoid_type_node
, integer_type_node
,
7944 integer_type_node
, NULL_TREE
);
7946 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat
,
7947 ALTIVEC_BUILTIN_LD_INTERNAL_4sf
);
7948 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf
,
7949 ALTIVEC_BUILTIN_ST_INTERNAL_4sf
);
7950 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint
,
7951 ALTIVEC_BUILTIN_LD_INTERNAL_4si
);
7952 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si
,
7953 ALTIVEC_BUILTIN_ST_INTERNAL_4si
);
7954 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort
,
7955 ALTIVEC_BUILTIN_LD_INTERNAL_8hi
);
7956 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi
,
7957 ALTIVEC_BUILTIN_ST_INTERNAL_8hi
);
7958 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar
,
7959 ALTIVEC_BUILTIN_LD_INTERNAL_16qi
);
7960 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi
,
7961 ALTIVEC_BUILTIN_ST_INTERNAL_16qi
);
7962 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mtvscr", void_ftype_v4si
, ALTIVEC_BUILTIN_MTVSCR
);
7963 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mfvscr", v8hi_ftype_void
, ALTIVEC_BUILTIN_MFVSCR
);
7964 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dssall", void_ftype_void
, ALTIVEC_BUILTIN_DSSALL
);
7965 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dss", void_ftype_int
, ALTIVEC_BUILTIN_DSS
);
7966 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVSL
);
7967 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVSR
);
7968 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVEBX
);
7969 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVEHX
);
7970 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVEWX
);
7971 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVXL
);
7972 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVX
);
7973 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid
, ALTIVEC_BUILTIN_STVX
);
7974 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid
, ALTIVEC_BUILTIN_STVEWX
);
7975 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid
, ALTIVEC_BUILTIN_STVXL
);
7976 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid
, ALTIVEC_BUILTIN_STVEBX
);
7977 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid
, ALTIVEC_BUILTIN_STVEHX
);
7978 def_builtin (MASK_ALTIVEC
, "__builtin_vec_ld", opaque_ftype_long_pcvoid
, ALTIVEC_BUILTIN_VEC_LD
);
7979 def_builtin (MASK_ALTIVEC
, "__builtin_vec_lde", opaque_ftype_long_pcvoid
, ALTIVEC_BUILTIN_VEC_LDE
);
7980 def_builtin (MASK_ALTIVEC
, "__builtin_vec_ldl", opaque_ftype_long_pcvoid
, ALTIVEC_BUILTIN_VEC_LDL
);
7981 def_builtin (MASK_ALTIVEC
, "__builtin_vec_lvsl", v16qi_ftype_long_pcvoid
, ALTIVEC_BUILTIN_VEC_LVSL
);
7982 def_builtin (MASK_ALTIVEC
, "__builtin_vec_lvsr", v16qi_ftype_long_pcvoid
, ALTIVEC_BUILTIN_VEC_LVSR
);
7983 def_builtin (MASK_ALTIVEC
, "__builtin_vec_lvebx", v16qi_ftype_long_pcvoid
, ALTIVEC_BUILTIN_VEC_LVEBX
);
7984 def_builtin (MASK_ALTIVEC
, "__builtin_vec_lvehx", v8hi_ftype_long_pcvoid
, ALTIVEC_BUILTIN_VEC_LVEHX
);
7985 def_builtin (MASK_ALTIVEC
, "__builtin_vec_lvewx", v4si_ftype_long_pcvoid
, ALTIVEC_BUILTIN_VEC_LVEWX
);
7986 def_builtin (MASK_ALTIVEC
, "__builtin_vec_st", void_ftype_opaque_long_pvoid
, ALTIVEC_BUILTIN_VEC_ST
);
7987 def_builtin (MASK_ALTIVEC
, "__builtin_vec_ste", void_ftype_opaque_long_pvoid
, ALTIVEC_BUILTIN_VEC_STE
);
7988 def_builtin (MASK_ALTIVEC
, "__builtin_vec_stl", void_ftype_opaque_long_pvoid
, ALTIVEC_BUILTIN_VEC_STL
);
7989 def_builtin (MASK_ALTIVEC
, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid
, ALTIVEC_BUILTIN_VEC_STVEWX
);
7990 def_builtin (MASK_ALTIVEC
, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid
, ALTIVEC_BUILTIN_VEC_STVEBX
);
7991 def_builtin (MASK_ALTIVEC
, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid
, ALTIVEC_BUILTIN_VEC_STVEHX
);
7993 def_builtin (MASK_ALTIVEC
, "__builtin_vec_step", int_ftype_opaque
, ALTIVEC_BUILTIN_VEC_STEP
);
7995 def_builtin (MASK_ALTIVEC
, "__builtin_vec_sld", opaque_ftype_opaque_opaque_int
, ALTIVEC_BUILTIN_VEC_SLD
);
7996 def_builtin (MASK_ALTIVEC
, "__builtin_vec_splat", opaque_ftype_opaque_int
, ALTIVEC_BUILTIN_VEC_SPLAT
);
7997 def_builtin (MASK_ALTIVEC
, "__builtin_vec_vspltw", opaque_ftype_opaque_int
, ALTIVEC_BUILTIN_VEC_VSPLTW
);
7998 def_builtin (MASK_ALTIVEC
, "__builtin_vec_vsplth", opaque_ftype_opaque_int
, ALTIVEC_BUILTIN_VEC_VSPLTH
);
7999 def_builtin (MASK_ALTIVEC
, "__builtin_vec_vspltb", opaque_ftype_opaque_int
, ALTIVEC_BUILTIN_VEC_VSPLTB
);
8000 def_builtin (MASK_ALTIVEC
, "__builtin_vec_ctf", opaque_ftype_opaque_int
, ALTIVEC_BUILTIN_VEC_CTF
);
8001 def_builtin (MASK_ALTIVEC
, "__builtin_vec_vcfsx", opaque_ftype_opaque_int
, ALTIVEC_BUILTIN_VEC_VCFSX
);
8002 def_builtin (MASK_ALTIVEC
, "__builtin_vec_vcfux", opaque_ftype_opaque_int
, ALTIVEC_BUILTIN_VEC_VCFUX
);
8003 def_builtin (MASK_ALTIVEC
, "__builtin_vec_cts", opaque_ftype_opaque_int
, ALTIVEC_BUILTIN_VEC_CTS
);
8004 def_builtin (MASK_ALTIVEC
, "__builtin_vec_ctu", opaque_ftype_opaque_int
, ALTIVEC_BUILTIN_VEC_CTU
);
8006 /* Add the DST variants. */
8007 d
= (struct builtin_description
*) bdesc_dst
;
8008 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
8009 def_builtin (d
->mask
, d
->name
, void_ftype_pcvoid_int_int
, d
->code
);
8011 /* Initialize the predicates. */
8012 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
8013 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
8015 enum machine_mode mode1
;
8017 bool is_overloaded
= dp
->code
>= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8018 && dp
->code
<= ALTIVEC_BUILTIN_OVERLOADED_LAST
;
8023 mode1
= insn_data
[dp
->icode
].operand
[1].mode
;
8028 type
= int_ftype_int_opaque_opaque
;
8031 type
= int_ftype_int_v4si_v4si
;
8034 type
= int_ftype_int_v8hi_v8hi
;
8037 type
= int_ftype_int_v16qi_v16qi
;
8040 type
= int_ftype_int_v4sf_v4sf
;
8046 def_builtin (dp
->mask
, dp
->name
, type
, dp
->code
);
8049 /* Initialize the abs* operators. */
8050 d
= (struct builtin_description
*) bdesc_abs
;
8051 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
8053 enum machine_mode mode0
;
8056 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
8061 type
= v4si_ftype_v4si
;
8064 type
= v8hi_ftype_v8hi
;
8067 type
= v16qi_ftype_v16qi
;
8070 type
= v4sf_ftype_v4sf
;
8076 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
8083 /* Initialize target builtin that implements
8084 targetm.vectorize.builtin_mask_for_load. */
8086 decl
= lang_hooks
.builtin_function ("__builtin_altivec_mask_for_load",
8087 v16qi_ftype_long_pcvoid
,
8088 ALTIVEC_BUILTIN_MASK_FOR_LOAD
,
8090 tree_cons (get_identifier ("const"),
8091 NULL_TREE
, NULL_TREE
));
8092 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
8093 altivec_builtin_mask_for_load
= decl
;
8098 rs6000_common_init_builtins (void)
8100 struct builtin_description
*d
;
8103 tree v4sf_ftype_v4sf_v4sf_v16qi
8104 = build_function_type_list (V4SF_type_node
,
8105 V4SF_type_node
, V4SF_type_node
,
8106 V16QI_type_node
, NULL_TREE
);
8107 tree v4si_ftype_v4si_v4si_v16qi
8108 = build_function_type_list (V4SI_type_node
,
8109 V4SI_type_node
, V4SI_type_node
,
8110 V16QI_type_node
, NULL_TREE
);
8111 tree v8hi_ftype_v8hi_v8hi_v16qi
8112 = build_function_type_list (V8HI_type_node
,
8113 V8HI_type_node
, V8HI_type_node
,
8114 V16QI_type_node
, NULL_TREE
);
8115 tree v16qi_ftype_v16qi_v16qi_v16qi
8116 = build_function_type_list (V16QI_type_node
,
8117 V16QI_type_node
, V16QI_type_node
,
8118 V16QI_type_node
, NULL_TREE
);
8120 = build_function_type_list (V4SI_type_node
, integer_type_node
, NULL_TREE
);
8122 = build_function_type_list (V8HI_type_node
, integer_type_node
, NULL_TREE
);
8123 tree v16qi_ftype_int
8124 = build_function_type_list (V16QI_type_node
, integer_type_node
, NULL_TREE
);
8125 tree v8hi_ftype_v16qi
8126 = build_function_type_list (V8HI_type_node
, V16QI_type_node
, NULL_TREE
);
8127 tree v4sf_ftype_v4sf
8128 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
8130 tree v2si_ftype_v2si_v2si
8131 = build_function_type_list (opaque_V2SI_type_node
,
8132 opaque_V2SI_type_node
,
8133 opaque_V2SI_type_node
, NULL_TREE
);
8135 tree v2sf_ftype_v2sf_v2sf
8136 = build_function_type_list (opaque_V2SF_type_node
,
8137 opaque_V2SF_type_node
,
8138 opaque_V2SF_type_node
, NULL_TREE
);
8140 tree v2si_ftype_int_int
8141 = build_function_type_list (opaque_V2SI_type_node
,
8142 integer_type_node
, integer_type_node
,
8145 tree opaque_ftype_opaque
8146 = build_function_type_list (opaque_V4SI_type_node
,
8147 opaque_V4SI_type_node
, NULL_TREE
);
8149 tree v2si_ftype_v2si
8150 = build_function_type_list (opaque_V2SI_type_node
,
8151 opaque_V2SI_type_node
, NULL_TREE
);
8153 tree v2sf_ftype_v2sf
8154 = build_function_type_list (opaque_V2SF_type_node
,
8155 opaque_V2SF_type_node
, NULL_TREE
);
8157 tree v2sf_ftype_v2si
8158 = build_function_type_list (opaque_V2SF_type_node
,
8159 opaque_V2SI_type_node
, NULL_TREE
);
8161 tree v2si_ftype_v2sf
8162 = build_function_type_list (opaque_V2SI_type_node
,
8163 opaque_V2SF_type_node
, NULL_TREE
);
8165 tree v2si_ftype_v2si_char
8166 = build_function_type_list (opaque_V2SI_type_node
,
8167 opaque_V2SI_type_node
,
8168 char_type_node
, NULL_TREE
);
8170 tree v2si_ftype_int_char
8171 = build_function_type_list (opaque_V2SI_type_node
,
8172 integer_type_node
, char_type_node
, NULL_TREE
);
8174 tree v2si_ftype_char
8175 = build_function_type_list (opaque_V2SI_type_node
,
8176 char_type_node
, NULL_TREE
);
8178 tree int_ftype_int_int
8179 = build_function_type_list (integer_type_node
,
8180 integer_type_node
, integer_type_node
,
8183 tree opaque_ftype_opaque_opaque
8184 = build_function_type_list (opaque_V4SI_type_node
,
8185 opaque_V4SI_type_node
, opaque_V4SI_type_node
, NULL_TREE
);
8186 tree v4si_ftype_v4si_v4si
8187 = build_function_type_list (V4SI_type_node
,
8188 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
8189 tree v4sf_ftype_v4si_int
8190 = build_function_type_list (V4SF_type_node
,
8191 V4SI_type_node
, integer_type_node
, NULL_TREE
);
8192 tree v4si_ftype_v4sf_int
8193 = build_function_type_list (V4SI_type_node
,
8194 V4SF_type_node
, integer_type_node
, NULL_TREE
);
8195 tree v4si_ftype_v4si_int
8196 = build_function_type_list (V4SI_type_node
,
8197 V4SI_type_node
, integer_type_node
, NULL_TREE
);
8198 tree v8hi_ftype_v8hi_int
8199 = build_function_type_list (V8HI_type_node
,
8200 V8HI_type_node
, integer_type_node
, NULL_TREE
);
8201 tree v16qi_ftype_v16qi_int
8202 = build_function_type_list (V16QI_type_node
,
8203 V16QI_type_node
, integer_type_node
, NULL_TREE
);
8204 tree v16qi_ftype_v16qi_v16qi_int
8205 = build_function_type_list (V16QI_type_node
,
8206 V16QI_type_node
, V16QI_type_node
,
8207 integer_type_node
, NULL_TREE
);
8208 tree v8hi_ftype_v8hi_v8hi_int
8209 = build_function_type_list (V8HI_type_node
,
8210 V8HI_type_node
, V8HI_type_node
,
8211 integer_type_node
, NULL_TREE
);
8212 tree v4si_ftype_v4si_v4si_int
8213 = build_function_type_list (V4SI_type_node
,
8214 V4SI_type_node
, V4SI_type_node
,
8215 integer_type_node
, NULL_TREE
);
8216 tree v4sf_ftype_v4sf_v4sf_int
8217 = build_function_type_list (V4SF_type_node
,
8218 V4SF_type_node
, V4SF_type_node
,
8219 integer_type_node
, NULL_TREE
);
8220 tree v4sf_ftype_v4sf_v4sf
8221 = build_function_type_list (V4SF_type_node
,
8222 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
8223 tree opaque_ftype_opaque_opaque_opaque
8224 = build_function_type_list (opaque_V4SI_type_node
,
8225 opaque_V4SI_type_node
, opaque_V4SI_type_node
,
8226 opaque_V4SI_type_node
, NULL_TREE
);
8227 tree v4sf_ftype_v4sf_v4sf_v4si
8228 = build_function_type_list (V4SF_type_node
,
8229 V4SF_type_node
, V4SF_type_node
,
8230 V4SI_type_node
, NULL_TREE
);
8231 tree v4sf_ftype_v4sf_v4sf_v4sf
8232 = build_function_type_list (V4SF_type_node
,
8233 V4SF_type_node
, V4SF_type_node
,
8234 V4SF_type_node
, NULL_TREE
);
8235 tree v4si_ftype_v4si_v4si_v4si
8236 = build_function_type_list (V4SI_type_node
,
8237 V4SI_type_node
, V4SI_type_node
,
8238 V4SI_type_node
, NULL_TREE
);
8239 tree v8hi_ftype_v8hi_v8hi
8240 = build_function_type_list (V8HI_type_node
,
8241 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
8242 tree v8hi_ftype_v8hi_v8hi_v8hi
8243 = build_function_type_list (V8HI_type_node
,
8244 V8HI_type_node
, V8HI_type_node
,
8245 V8HI_type_node
, NULL_TREE
);
8246 tree v4si_ftype_v8hi_v8hi_v4si
8247 = build_function_type_list (V4SI_type_node
,
8248 V8HI_type_node
, V8HI_type_node
,
8249 V4SI_type_node
, NULL_TREE
);
8250 tree v4si_ftype_v16qi_v16qi_v4si
8251 = build_function_type_list (V4SI_type_node
,
8252 V16QI_type_node
, V16QI_type_node
,
8253 V4SI_type_node
, NULL_TREE
);
8254 tree v16qi_ftype_v16qi_v16qi
8255 = build_function_type_list (V16QI_type_node
,
8256 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
8257 tree v4si_ftype_v4sf_v4sf
8258 = build_function_type_list (V4SI_type_node
,
8259 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
8260 tree v8hi_ftype_v16qi_v16qi
8261 = build_function_type_list (V8HI_type_node
,
8262 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
8263 tree v4si_ftype_v8hi_v8hi
8264 = build_function_type_list (V4SI_type_node
,
8265 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
8266 tree v8hi_ftype_v4si_v4si
8267 = build_function_type_list (V8HI_type_node
,
8268 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
8269 tree v16qi_ftype_v8hi_v8hi
8270 = build_function_type_list (V16QI_type_node
,
8271 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
8272 tree v4si_ftype_v16qi_v4si
8273 = build_function_type_list (V4SI_type_node
,
8274 V16QI_type_node
, V4SI_type_node
, NULL_TREE
);
8275 tree v4si_ftype_v16qi_v16qi
8276 = build_function_type_list (V4SI_type_node
,
8277 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
8278 tree v4si_ftype_v8hi_v4si
8279 = build_function_type_list (V4SI_type_node
,
8280 V8HI_type_node
, V4SI_type_node
, NULL_TREE
);
8281 tree v4si_ftype_v8hi
8282 = build_function_type_list (V4SI_type_node
, V8HI_type_node
, NULL_TREE
);
8283 tree int_ftype_v4si_v4si
8284 = build_function_type_list (integer_type_node
,
8285 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
8286 tree int_ftype_v4sf_v4sf
8287 = build_function_type_list (integer_type_node
,
8288 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
8289 tree int_ftype_v16qi_v16qi
8290 = build_function_type_list (integer_type_node
,
8291 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
8292 tree int_ftype_v8hi_v8hi
8293 = build_function_type_list (integer_type_node
,
8294 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
8296 /* Add the simple ternary operators. */
8297 d
= (struct builtin_description
*) bdesc_3arg
;
8298 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
8300 enum machine_mode mode0
, mode1
, mode2
, mode3
;
8302 bool is_overloaded
= d
->code
>= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8303 && d
->code
<= ALTIVEC_BUILTIN_OVERLOADED_LAST
;
8314 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
8317 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
8318 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
8319 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
8320 mode3
= insn_data
[d
->icode
].operand
[3].mode
;
8323 /* When all four are of the same mode. */
8324 if (mode0
== mode1
&& mode1
== mode2
&& mode2
== mode3
)
8329 type
= opaque_ftype_opaque_opaque_opaque
;
8332 type
= v4si_ftype_v4si_v4si_v4si
;
8335 type
= v4sf_ftype_v4sf_v4sf_v4sf
;
8338 type
= v8hi_ftype_v8hi_v8hi_v8hi
;
8341 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
8347 else if (mode0
== mode1
&& mode1
== mode2
&& mode3
== V16QImode
)
8352 type
= v4si_ftype_v4si_v4si_v16qi
;
8355 type
= v4sf_ftype_v4sf_v4sf_v16qi
;
8358 type
= v8hi_ftype_v8hi_v8hi_v16qi
;
8361 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
8367 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
8368 && mode3
== V4SImode
)
8369 type
= v4si_ftype_v16qi_v16qi_v4si
;
8370 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
8371 && mode3
== V4SImode
)
8372 type
= v4si_ftype_v8hi_v8hi_v4si
;
8373 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
8374 && mode3
== V4SImode
)
8375 type
= v4sf_ftype_v4sf_v4sf_v4si
;
8377 /* vchar, vchar, vchar, 4 bit literal. */
8378 else if (mode0
== V16QImode
&& mode1
== mode0
&& mode2
== mode0
8380 type
= v16qi_ftype_v16qi_v16qi_int
;
8382 /* vshort, vshort, vshort, 4 bit literal. */
8383 else if (mode0
== V8HImode
&& mode1
== mode0
&& mode2
== mode0
8385 type
= v8hi_ftype_v8hi_v8hi_int
;
8387 /* vint, vint, vint, 4 bit literal. */
8388 else if (mode0
== V4SImode
&& mode1
== mode0
&& mode2
== mode0
8390 type
= v4si_ftype_v4si_v4si_int
;
8392 /* vfloat, vfloat, vfloat, 4 bit literal. */
8393 else if (mode0
== V4SFmode
&& mode1
== mode0
&& mode2
== mode0
8395 type
= v4sf_ftype_v4sf_v4sf_int
;
8400 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
8403 /* Add the simple binary operators. */
8404 d
= (struct builtin_description
*) bdesc_2arg
;
8405 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
8407 enum machine_mode mode0
, mode1
, mode2
;
8409 bool is_overloaded
= d
->code
>= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8410 && d
->code
<= ALTIVEC_BUILTIN_OVERLOADED_LAST
;
8420 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
8423 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
8424 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
8425 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
8428 /* When all three operands are of the same mode. */
8429 if (mode0
== mode1
&& mode1
== mode2
)
8434 type
= opaque_ftype_opaque_opaque
;
8437 type
= v4sf_ftype_v4sf_v4sf
;
8440 type
= v4si_ftype_v4si_v4si
;
8443 type
= v16qi_ftype_v16qi_v16qi
;
8446 type
= v8hi_ftype_v8hi_v8hi
;
8449 type
= v2si_ftype_v2si_v2si
;
8452 type
= v2sf_ftype_v2sf_v2sf
;
8455 type
= int_ftype_int_int
;
8462 /* A few other combos we really don't want to do manually. */
8464 /* vint, vfloat, vfloat. */
8465 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
)
8466 type
= v4si_ftype_v4sf_v4sf
;
8468 /* vshort, vchar, vchar. */
8469 else if (mode0
== V8HImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
8470 type
= v8hi_ftype_v16qi_v16qi
;
8472 /* vint, vshort, vshort. */
8473 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
8474 type
= v4si_ftype_v8hi_v8hi
;
8476 /* vshort, vint, vint. */
8477 else if (mode0
== V8HImode
&& mode1
== V4SImode
&& mode2
== V4SImode
)
8478 type
= v8hi_ftype_v4si_v4si
;
8480 /* vchar, vshort, vshort. */
8481 else if (mode0
== V16QImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
8482 type
= v16qi_ftype_v8hi_v8hi
;
8484 /* vint, vchar, vint. */
8485 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V4SImode
)
8486 type
= v4si_ftype_v16qi_v4si
;
8488 /* vint, vchar, vchar. */
8489 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
8490 type
= v4si_ftype_v16qi_v16qi
;
8492 /* vint, vshort, vint. */
8493 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V4SImode
)
8494 type
= v4si_ftype_v8hi_v4si
;
8496 /* vint, vint, 5 bit literal. */
8497 else if (mode0
== V4SImode
&& mode1
== V4SImode
&& mode2
== QImode
)
8498 type
= v4si_ftype_v4si_int
;
8500 /* vshort, vshort, 5 bit literal. */
8501 else if (mode0
== V8HImode
&& mode1
== V8HImode
&& mode2
== QImode
)
8502 type
= v8hi_ftype_v8hi_int
;
8504 /* vchar, vchar, 5 bit literal. */
8505 else if (mode0
== V16QImode
&& mode1
== V16QImode
&& mode2
== QImode
)
8506 type
= v16qi_ftype_v16qi_int
;
8508 /* vfloat, vint, 5 bit literal. */
8509 else if (mode0
== V4SFmode
&& mode1
== V4SImode
&& mode2
== QImode
)
8510 type
= v4sf_ftype_v4si_int
;
8512 /* vint, vfloat, 5 bit literal. */
8513 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== QImode
)
8514 type
= v4si_ftype_v4sf_int
;
8516 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== SImode
)
8517 type
= v2si_ftype_int_int
;
8519 else if (mode0
== V2SImode
&& mode1
== V2SImode
&& mode2
== QImode
)
8520 type
= v2si_ftype_v2si_char
;
8522 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== QImode
)
8523 type
= v2si_ftype_int_char
;
8528 gcc_assert (mode0
== SImode
);
8532 type
= int_ftype_v4si_v4si
;
8535 type
= int_ftype_v4sf_v4sf
;
8538 type
= int_ftype_v16qi_v16qi
;
8541 type
= int_ftype_v8hi_v8hi
;
8548 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
8551 /* Add the simple unary operators. */
8552 d
= (struct builtin_description
*) bdesc_1arg
;
8553 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
8555 enum machine_mode mode0
, mode1
;
8557 bool is_overloaded
= d
->code
>= ALTIVEC_BUILTIN_OVERLOADED_FIRST
8558 && d
->code
<= ALTIVEC_BUILTIN_OVERLOADED_LAST
;
8567 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
8570 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
8571 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
8574 if (mode0
== V4SImode
&& mode1
== QImode
)
8575 type
= v4si_ftype_int
;
8576 else if (mode0
== V8HImode
&& mode1
== QImode
)
8577 type
= v8hi_ftype_int
;
8578 else if (mode0
== V16QImode
&& mode1
== QImode
)
8579 type
= v16qi_ftype_int
;
8580 else if (mode0
== VOIDmode
&& mode1
== VOIDmode
)
8581 type
= opaque_ftype_opaque
;
8582 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
)
8583 type
= v4sf_ftype_v4sf
;
8584 else if (mode0
== V8HImode
&& mode1
== V16QImode
)
8585 type
= v8hi_ftype_v16qi
;
8586 else if (mode0
== V4SImode
&& mode1
== V8HImode
)
8587 type
= v4si_ftype_v8hi
;
8588 else if (mode0
== V2SImode
&& mode1
== V2SImode
)
8589 type
= v2si_ftype_v2si
;
8590 else if (mode0
== V2SFmode
&& mode1
== V2SFmode
)
8591 type
= v2sf_ftype_v2sf
;
8592 else if (mode0
== V2SFmode
&& mode1
== V2SImode
)
8593 type
= v2sf_ftype_v2si
;
8594 else if (mode0
== V2SImode
&& mode1
== V2SFmode
)
8595 type
= v2si_ftype_v2sf
;
8596 else if (mode0
== V2SImode
&& mode1
== QImode
)
8597 type
= v2si_ftype_char
;
8601 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
8606 rs6000_init_libfuncs (void)
8608 if (!TARGET_HARD_FLOAT
)
8611 if (DEFAULT_ABI
!= ABI_V4
)
8613 if (TARGET_XCOFF
&& ! TARGET_POWER2
&& ! TARGET_POWERPC
)
8615 /* AIX library routines for float->int conversion. */
8616 set_conv_libfunc (sfix_optab
, SImode
, DFmode
, "__itrunc");
8617 set_conv_libfunc (ufix_optab
, SImode
, DFmode
, "__uitrunc");
8618 set_conv_libfunc (sfix_optab
, SImode
, TFmode
, "_qitrunc");
8619 set_conv_libfunc (ufix_optab
, SImode
, TFmode
, "_quitrunc");
8622 /* AIX/Darwin/64-bit Linux quad floating point routines. */
8623 if (!TARGET_XL_COMPAT
)
8625 set_optab_libfunc (add_optab
, TFmode
, "__gcc_qadd");
8626 set_optab_libfunc (sub_optab
, TFmode
, "__gcc_qsub");
8627 set_optab_libfunc (smul_optab
, TFmode
, "__gcc_qmul");
8628 set_optab_libfunc (sdiv_optab
, TFmode
, "__gcc_qdiv");
8632 set_optab_libfunc (add_optab
, TFmode
, "_xlqadd");
8633 set_optab_libfunc (sub_optab
, TFmode
, "_xlqsub");
8634 set_optab_libfunc (smul_optab
, TFmode
, "_xlqmul");
8635 set_optab_libfunc (sdiv_optab
, TFmode
, "_xlqdiv");
8640 /* 32-bit SVR4 quad floating point routines. */
8642 set_optab_libfunc (add_optab
, TFmode
, "_q_add");
8643 set_optab_libfunc (sub_optab
, TFmode
, "_q_sub");
8644 set_optab_libfunc (neg_optab
, TFmode
, "_q_neg");
8645 set_optab_libfunc (smul_optab
, TFmode
, "_q_mul");
8646 set_optab_libfunc (sdiv_optab
, TFmode
, "_q_div");
8647 if (TARGET_PPC_GPOPT
|| TARGET_POWER2
)
8648 set_optab_libfunc (sqrt_optab
, TFmode
, "_q_sqrt");
8650 set_optab_libfunc (eq_optab
, TFmode
, "_q_feq");
8651 set_optab_libfunc (ne_optab
, TFmode
, "_q_fne");
8652 set_optab_libfunc (gt_optab
, TFmode
, "_q_fgt");
8653 set_optab_libfunc (ge_optab
, TFmode
, "_q_fge");
8654 set_optab_libfunc (lt_optab
, TFmode
, "_q_flt");
8655 set_optab_libfunc (le_optab
, TFmode
, "_q_fle");
8657 set_conv_libfunc (sext_optab
, TFmode
, SFmode
, "_q_stoq");
8658 set_conv_libfunc (sext_optab
, TFmode
, DFmode
, "_q_dtoq");
8659 set_conv_libfunc (trunc_optab
, SFmode
, TFmode
, "_q_qtos");
8660 set_conv_libfunc (trunc_optab
, DFmode
, TFmode
, "_q_qtod");
8661 set_conv_libfunc (sfix_optab
, SImode
, TFmode
, "_q_qtoi");
8662 set_conv_libfunc (ufix_optab
, SImode
, TFmode
, "_q_qtou");
8663 set_conv_libfunc (sfloat_optab
, TFmode
, SImode
, "_q_itoq");
8668 /* Expand a block clear operation, and return 1 if successful. Return 0
8669 if we should let the compiler generate normal code.
8671 operands[0] is the destination
8672 operands[1] is the length
8673 operands[3] is the alignment */
8676 expand_block_clear (rtx operands
[])
8678 rtx orig_dest
= operands
[0];
8679 rtx bytes_rtx
= operands
[1];
8680 rtx align_rtx
= operands
[3];
8681 bool constp
= (GET_CODE (bytes_rtx
) == CONST_INT
);
8682 HOST_WIDE_INT align
;
8683 HOST_WIDE_INT bytes
;
8688 /* If this is not a fixed size move, just call memcpy */
8692 /* This must be a fixed size alignment */
8693 gcc_assert (GET_CODE (align_rtx
) == CONST_INT
);
8694 align
= INTVAL (align_rtx
) * BITS_PER_UNIT
;
8696 /* Anything to clear? */
8697 bytes
= INTVAL (bytes_rtx
);
8701 /* Use the builtin memset after a point, to avoid huge code bloat.
8702 When optimize_size, avoid any significant code bloat; calling
8703 memset is about 4 instructions, so allow for one instruction to
8704 load zero and three to do clearing. */
8705 if (TARGET_ALTIVEC
&& align
>= 128)
8707 else if (TARGET_POWERPC64
&& align
>= 32)
8712 if (optimize_size
&& bytes
> 3 * clear_step
)
8714 if (! optimize_size
&& bytes
> 8 * clear_step
)
8717 for (offset
= 0; bytes
> 0; offset
+= clear_bytes
, bytes
-= clear_bytes
)
8719 enum machine_mode mode
= BLKmode
;
8722 if (bytes
>= 16 && TARGET_ALTIVEC
&& align
>= 128)
8727 else if (bytes
>= 8 && TARGET_POWERPC64
8728 /* 64-bit loads and stores require word-aligned
8730 && (align
>= 64 || (!STRICT_ALIGNMENT
&& align
>= 32)))
8735 else if (bytes
>= 4 && (align
>= 32 || !STRICT_ALIGNMENT
))
8736 { /* move 4 bytes */
8740 else if (bytes
== 2 && (align
>= 16 || !STRICT_ALIGNMENT
))
8741 { /* move 2 bytes */
8745 else /* move 1 byte at a time */
8751 dest
= adjust_address (orig_dest
, mode
, offset
);
8753 emit_move_insn (dest
, CONST0_RTX (mode
));
8760 /* Expand a block move operation, and return 1 if successful. Return 0
8761 if we should let the compiler generate normal code.
8763 operands[0] is the destination
8764 operands[1] is the source
8765 operands[2] is the length
8766 operands[3] is the alignment */
8768 #define MAX_MOVE_REG 4
8771 expand_block_move (rtx operands
[])
8773 rtx orig_dest
= operands
[0];
8774 rtx orig_src
= operands
[1];
8775 rtx bytes_rtx
= operands
[2];
8776 rtx align_rtx
= operands
[3];
8777 int constp
= (GET_CODE (bytes_rtx
) == CONST_INT
);
8782 rtx stores
[MAX_MOVE_REG
];
8785 /* If this is not a fixed size move, just call memcpy */
8789 /* This must be a fixed size alignment */
8790 gcc_assert (GET_CODE (align_rtx
) == CONST_INT
);
8791 align
= INTVAL (align_rtx
) * BITS_PER_UNIT
;
8793 /* Anything to move? */
8794 bytes
= INTVAL (bytes_rtx
);
8798 /* store_one_arg depends on expand_block_move to handle at least the size of
8799 reg_parm_stack_space. */
8800 if (bytes
> (TARGET_POWERPC64
? 64 : 32))
8803 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
8806 rtx (*movmemsi
) (rtx
, rtx
, rtx
, rtx
);
8807 rtx (*mov
) (rtx
, rtx
);
8809 enum machine_mode mode
= BLKmode
;
8812 /* Altivec first, since it will be faster than a string move
8813 when it applies, and usually not significantly larger. */
8814 if (TARGET_ALTIVEC
&& bytes
>= 16 && align
>= 128)
8818 gen_func
.mov
= gen_movv4si
;
8820 else if (TARGET_STRING
8821 && bytes
> 24 /* move up to 32 bytes at a time */
8829 && ! fixed_regs
[12])
8831 move_bytes
= (bytes
> 32) ? 32 : bytes
;
8832 gen_func
.movmemsi
= gen_movmemsi_8reg
;
8834 else if (TARGET_STRING
8835 && bytes
> 16 /* move up to 24 bytes at a time */
8841 && ! fixed_regs
[10])
8843 move_bytes
= (bytes
> 24) ? 24 : bytes
;
8844 gen_func
.movmemsi
= gen_movmemsi_6reg
;
8846 else if (TARGET_STRING
8847 && bytes
> 8 /* move up to 16 bytes at a time */
8853 move_bytes
= (bytes
> 16) ? 16 : bytes
;
8854 gen_func
.movmemsi
= gen_movmemsi_4reg
;
8856 else if (bytes
>= 8 && TARGET_POWERPC64
8857 /* 64-bit loads and stores require word-aligned
8859 && (align
>= 64 || (!STRICT_ALIGNMENT
&& align
>= 32)))
8863 gen_func
.mov
= gen_movdi
;
8865 else if (TARGET_STRING
&& bytes
> 4 && !TARGET_POWERPC64
)
8866 { /* move up to 8 bytes at a time */
8867 move_bytes
= (bytes
> 8) ? 8 : bytes
;
8868 gen_func
.movmemsi
= gen_movmemsi_2reg
;
8870 else if (bytes
>= 4 && (align
>= 32 || !STRICT_ALIGNMENT
))
8871 { /* move 4 bytes */
8874 gen_func
.mov
= gen_movsi
;
8876 else if (bytes
== 2 && (align
>= 16 || !STRICT_ALIGNMENT
))
8877 { /* move 2 bytes */
8880 gen_func
.mov
= gen_movhi
;
8882 else if (TARGET_STRING
&& bytes
> 1)
8883 { /* move up to 4 bytes at a time */
8884 move_bytes
= (bytes
> 4) ? 4 : bytes
;
8885 gen_func
.movmemsi
= gen_movmemsi_1reg
;
8887 else /* move 1 byte at a time */
8891 gen_func
.mov
= gen_movqi
;
8894 src
= adjust_address (orig_src
, mode
, offset
);
8895 dest
= adjust_address (orig_dest
, mode
, offset
);
8897 if (mode
!= BLKmode
)
8899 rtx tmp_reg
= gen_reg_rtx (mode
);
8901 emit_insn ((*gen_func
.mov
) (tmp_reg
, src
));
8902 stores
[num_reg
++] = (*gen_func
.mov
) (dest
, tmp_reg
);
8905 if (mode
== BLKmode
|| num_reg
>= MAX_MOVE_REG
|| bytes
== move_bytes
)
8908 for (i
= 0; i
< num_reg
; i
++)
8909 emit_insn (stores
[i
]);
8913 if (mode
== BLKmode
)
8915 /* Move the address into scratch registers. The movmemsi
8916 patterns require zero offset. */
8917 if (!REG_P (XEXP (src
, 0)))
8919 rtx src_reg
= copy_addr_to_reg (XEXP (src
, 0));
8920 src
= replace_equiv_address (src
, src_reg
);
8922 set_mem_size (src
, GEN_INT (move_bytes
));
8924 if (!REG_P (XEXP (dest
, 0)))
8926 rtx dest_reg
= copy_addr_to_reg (XEXP (dest
, 0));
8927 dest
= replace_equiv_address (dest
, dest_reg
);
8929 set_mem_size (dest
, GEN_INT (move_bytes
));
8931 emit_insn ((*gen_func
.movmemsi
) (dest
, src
,
8932 GEN_INT (move_bytes
& 31),
8941 /* Return a string to perform a load_multiple operation.
8942 operands[0] is the vector.
8943 operands[1] is the source address.
8944 operands[2] is the first destination register. */
8947 rs6000_output_load_multiple (rtx operands
[3])
8949 /* We have to handle the case where the pseudo used to contain the address
8950 is assigned to one of the output registers. */
8952 int words
= XVECLEN (operands
[0], 0);
8955 if (XVECLEN (operands
[0], 0) == 1)
8956 return "{l|lwz} %2,0(%1)";
8958 for (i
= 0; i
< words
; i
++)
8959 if (refers_to_regno_p (REGNO (operands
[2]) + i
,
8960 REGNO (operands
[2]) + i
+ 1, operands
[1], 0))
8964 xop
[0] = GEN_INT (4 * (words
-1));
8965 xop
[1] = operands
[1];
8966 xop
[2] = operands
[2];
8967 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop
);
8972 xop
[0] = GEN_INT (4 * (words
-1));
8973 xop
[1] = operands
[1];
8974 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + 1);
8975 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop
);
8980 for (j
= 0; j
< words
; j
++)
8983 xop
[0] = GEN_INT (j
* 4);
8984 xop
[1] = operands
[1];
8985 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + j
);
8986 output_asm_insn ("{l|lwz} %2,%0(%1)", xop
);
8988 xop
[0] = GEN_INT (i
* 4);
8989 xop
[1] = operands
[1];
8990 output_asm_insn ("{l|lwz} %1,%0(%1)", xop
);
8995 return "{lsi|lswi} %2,%1,%N0";
8999 /* A validation routine: say whether CODE, a condition code, and MODE
9000 match. The other alternatives either don't make sense or should
9001 never be generated. */
9004 validate_condition_mode (enum rtx_code code
, enum machine_mode mode
)
9006 gcc_assert ((GET_RTX_CLASS (code
) == RTX_COMPARE
9007 || GET_RTX_CLASS (code
) == RTX_COMM_COMPARE
)
9008 && GET_MODE_CLASS (mode
) == MODE_CC
);
9010 /* These don't make sense. */
9011 gcc_assert ((code
!= GT
&& code
!= LT
&& code
!= GE
&& code
!= LE
)
9012 || mode
!= CCUNSmode
);
9014 gcc_assert ((code
!= GTU
&& code
!= LTU
&& code
!= GEU
&& code
!= LEU
)
9015 || mode
== CCUNSmode
);
9017 gcc_assert (mode
== CCFPmode
9018 || (code
!= ORDERED
&& code
!= UNORDERED
9019 && code
!= UNEQ
&& code
!= LTGT
9020 && code
!= UNGT
&& code
!= UNLT
9021 && code
!= UNGE
&& code
!= UNLE
));
9023 /* These should never be generated except for
9024 flag_finite_math_only. */
9025 gcc_assert (mode
!= CCFPmode
9026 || flag_finite_math_only
9027 || (code
!= LE
&& code
!= GE
9028 && code
!= UNEQ
&& code
!= LTGT
9029 && code
!= UNGT
&& code
!= UNLT
));
9031 /* These are invalid; the information is not there. */
9032 gcc_assert (mode
!= CCEQmode
|| code
== EQ
|| code
== NE
);
9036 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
9037 mask required to convert the result of a rotate insn into a shift
9038 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9041 includes_lshift_p (rtx shiftop
, rtx andop
)
9043 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
9045 shift_mask
<<= INTVAL (shiftop
);
9047 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
9050 /* Similar, but for right shift. */
9053 includes_rshift_p (rtx shiftop
, rtx andop
)
9055 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
9057 shift_mask
>>= INTVAL (shiftop
);
9059 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
9062 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
9063 to perform a left shift. It must have exactly SHIFTOP least
9064 significant 0's, then one or more 1's, then zero or more 0's. */
9067 includes_rldic_lshift_p (rtx shiftop
, rtx andop
)
9069 if (GET_CODE (andop
) == CONST_INT
)
9071 HOST_WIDE_INT c
, lsb
, shift_mask
;
9074 if (c
== 0 || c
== ~0)
9078 shift_mask
<<= INTVAL (shiftop
);
9080 /* Find the least significant one bit. */
9083 /* It must coincide with the LSB of the shift mask. */
9084 if (-lsb
!= shift_mask
)
9087 /* Invert to look for the next transition (if any). */
9090 /* Remove the low group of ones (originally low group of zeros). */
9093 /* Again find the lsb, and check we have all 1's above. */
9097 else if (GET_CODE (andop
) == CONST_DOUBLE
9098 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
9100 HOST_WIDE_INT low
, high
, lsb
;
9101 HOST_WIDE_INT shift_mask_low
, shift_mask_high
;
9103 low
= CONST_DOUBLE_LOW (andop
);
9104 if (HOST_BITS_PER_WIDE_INT
< 64)
9105 high
= CONST_DOUBLE_HIGH (andop
);
9107 if ((low
== 0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== 0))
9108 || (low
== ~0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0)))
9111 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
9113 shift_mask_high
= ~0;
9114 if (INTVAL (shiftop
) > 32)
9115 shift_mask_high
<<= INTVAL (shiftop
) - 32;
9119 if (-lsb
!= shift_mask_high
|| INTVAL (shiftop
) < 32)
9126 return high
== -lsb
;
9129 shift_mask_low
= ~0;
9130 shift_mask_low
<<= INTVAL (shiftop
);
9134 if (-lsb
!= shift_mask_low
)
9137 if (HOST_BITS_PER_WIDE_INT
< 64)
9142 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
9145 return high
== -lsb
;
9149 return low
== -lsb
&& (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0);
9155 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
9156 to perform a left shift. It must have SHIFTOP or more least
9157 significant 0's, with the remainder of the word 1's. */
9160 includes_rldicr_lshift_p (rtx shiftop
, rtx andop
)
9162 if (GET_CODE (andop
) == CONST_INT
)
9164 HOST_WIDE_INT c
, lsb
, shift_mask
;
9167 shift_mask
<<= INTVAL (shiftop
);
9170 /* Find the least significant one bit. */
9173 /* It must be covered by the shift mask.
9174 This test also rejects c == 0. */
9175 if ((lsb
& shift_mask
) == 0)
9178 /* Check we have all 1's above the transition, and reject all 1's. */
9179 return c
== -lsb
&& lsb
!= 1;
9181 else if (GET_CODE (andop
) == CONST_DOUBLE
9182 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
9184 HOST_WIDE_INT low
, lsb
, shift_mask_low
;
9186 low
= CONST_DOUBLE_LOW (andop
);
9188 if (HOST_BITS_PER_WIDE_INT
< 64)
9190 HOST_WIDE_INT high
, shift_mask_high
;
9192 high
= CONST_DOUBLE_HIGH (andop
);
9196 shift_mask_high
= ~0;
9197 if (INTVAL (shiftop
) > 32)
9198 shift_mask_high
<<= INTVAL (shiftop
) - 32;
9202 if ((lsb
& shift_mask_high
) == 0)
9205 return high
== -lsb
;
9211 shift_mask_low
= ~0;
9212 shift_mask_low
<<= INTVAL (shiftop
);
9216 if ((lsb
& shift_mask_low
) == 0)
9219 return low
== -lsb
&& lsb
!= 1;
9225 /* Return 1 if operands will generate a valid arguments to rlwimi
9226 instruction for insert with right shift in 64-bit mode. The mask may
9227 not start on the first bit or stop on the last bit because wrap-around
9228 effects of instruction do not correspond to semantics of RTL insn. */
9231 insvdi_rshift_rlwimi_p (rtx sizeop
, rtx startop
, rtx shiftop
)
9233 if (INTVAL (startop
) < 64
9234 && INTVAL (startop
) > 32
9235 && (INTVAL (sizeop
) + INTVAL (startop
) < 64)
9236 && (INTVAL (sizeop
) + INTVAL (startop
) > 33)
9237 && (INTVAL (sizeop
) + INTVAL (startop
) + INTVAL (shiftop
) < 96)
9238 && (INTVAL (sizeop
) + INTVAL (startop
) + INTVAL (shiftop
) >= 64)
9239 && (64 - (INTVAL (shiftop
) & 63)) >= INTVAL (sizeop
))
9245 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
9246 for lfq and stfq insns iff the registers are hard registers. */
9249 registers_ok_for_quad_peep (rtx reg1
, rtx reg2
)
9251 /* We might have been passed a SUBREG. */
9252 if (GET_CODE (reg1
) != REG
|| GET_CODE (reg2
) != REG
)
9255 /* We might have been passed non floating point registers. */
9256 if (!FP_REGNO_P (REGNO (reg1
))
9257 || !FP_REGNO_P (REGNO (reg2
)))
9260 return (REGNO (reg1
) == REGNO (reg2
) - 1);
9263 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
9264 addr1 and addr2 must be in consecutive memory locations
9265 (addr2 == addr1 + 8). */
9268 mems_ok_for_quad_peep (rtx mem1
, rtx mem2
)
9274 /* The mems cannot be volatile. */
9275 if (MEM_VOLATILE_P (mem1
) || MEM_VOLATILE_P (mem2
))
9278 addr1
= XEXP (mem1
, 0);
9279 addr2
= XEXP (mem2
, 0);
9281 /* Extract an offset (if used) from the first addr. */
9282 if (GET_CODE (addr1
) == PLUS
)
9284 /* If not a REG, return zero. */
9285 if (GET_CODE (XEXP (addr1
, 0)) != REG
)
9289 reg1
= REGNO (XEXP (addr1
, 0));
9290 /* The offset must be constant! */
9291 if (GET_CODE (XEXP (addr1
, 1)) != CONST_INT
)
9293 offset1
= INTVAL (XEXP (addr1
, 1));
9296 else if (GET_CODE (addr1
) != REG
)
9300 reg1
= REGNO (addr1
);
9301 /* This was a simple (mem (reg)) expression. Offset is 0. */
9305 /* Make sure the second address is a (mem (plus (reg) (const_int)))
9306 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
9307 register as addr1. */
9308 if (offset1
== -8 && GET_CODE (addr2
) == REG
&& reg1
== REGNO (addr2
))
9310 if (GET_CODE (addr2
) != PLUS
)
9313 if (GET_CODE (XEXP (addr2
, 0)) != REG
9314 || GET_CODE (XEXP (addr2
, 1)) != CONST_INT
)
9317 if (reg1
!= REGNO (XEXP (addr2
, 0)))
9320 /* The offset for the second addr must be 8 more than the first addr. */
9321 if (INTVAL (XEXP (addr2
, 1)) != offset1
+ 8)
9324 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
9329 /* Return the register class of a scratch register needed to copy IN into
9330 or out of a register in CLASS in MODE. If it can be done directly,
9331 NO_REGS is returned. */
9334 secondary_reload_class (enum reg_class
class,
9335 enum machine_mode mode ATTRIBUTE_UNUSED
,
9340 if (TARGET_ELF
|| (DEFAULT_ABI
== ABI_DARWIN
9342 && MACHOPIC_INDIRECT
9346 /* We cannot copy a symbolic operand directly into anything
9347 other than BASE_REGS for TARGET_ELF. So indicate that a
9348 register from BASE_REGS is needed as an intermediate
9351 On Darwin, pic addresses require a load from memory, which
9352 needs a base register. */
9353 if (class != BASE_REGS
9354 && (GET_CODE (in
) == SYMBOL_REF
9355 || GET_CODE (in
) == HIGH
9356 || GET_CODE (in
) == LABEL_REF
9357 || GET_CODE (in
) == CONST
))
9361 if (GET_CODE (in
) == REG
)
9364 if (regno
>= FIRST_PSEUDO_REGISTER
)
9366 regno
= true_regnum (in
);
9367 if (regno
>= FIRST_PSEUDO_REGISTER
)
9371 else if (GET_CODE (in
) == SUBREG
)
9373 regno
= true_regnum (in
);
9374 if (regno
>= FIRST_PSEUDO_REGISTER
)
9380 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
9382 if (class == GENERAL_REGS
|| class == BASE_REGS
9383 || (regno
>= 0 && INT_REGNO_P (regno
)))
9386 /* Constants, memory, and FP registers can go into FP registers. */
9387 if ((regno
== -1 || FP_REGNO_P (regno
))
9388 && (class == FLOAT_REGS
|| class == NON_SPECIAL_REGS
))
9391 /* Memory, and AltiVec registers can go into AltiVec registers. */
9392 if ((regno
== -1 || ALTIVEC_REGNO_P (regno
))
9393 && class == ALTIVEC_REGS
)
9396 /* We can copy among the CR registers. */
9397 if ((class == CR_REGS
|| class == CR0_REGS
)
9398 && regno
>= 0 && CR_REGNO_P (regno
))
9401 /* Otherwise, we need GENERAL_REGS. */
9402 return GENERAL_REGS
;
9405 /* Given a comparison operation, return the bit number in CCR to test. We
9406 know this is a valid comparison.
9408 SCC_P is 1 if this is for an scc. That means that %D will have been
9409 used instead of %C, so the bits will be in different places.
9411 Return -1 if OP isn't a valid comparison for some reason. */
9414 ccr_bit (rtx op
, int scc_p
)
9416 enum rtx_code code
= GET_CODE (op
);
9417 enum machine_mode cc_mode
;
9422 if (!COMPARISON_P (op
))
9427 gcc_assert (GET_CODE (reg
) == REG
&& CR_REGNO_P (REGNO (reg
)));
9429 cc_mode
= GET_MODE (reg
);
9430 cc_regnum
= REGNO (reg
);
9431 base_bit
= 4 * (cc_regnum
- CR0_REGNO
);
9433 validate_condition_mode (code
, cc_mode
);
9435 /* When generating a sCOND operation, only positive conditions are
9438 || code
== EQ
|| code
== GT
|| code
== LT
|| code
== UNORDERED
9439 || code
== GTU
|| code
== LTU
);
9444 return scc_p
? base_bit
+ 3 : base_bit
+ 2;
9446 return base_bit
+ 2;
9447 case GT
: case GTU
: case UNLE
:
9448 return base_bit
+ 1;
9449 case LT
: case LTU
: case UNGE
:
9451 case ORDERED
: case UNORDERED
:
9452 return base_bit
+ 3;
9455 /* If scc, we will have done a cror to put the bit in the
9456 unordered position. So test that bit. For integer, this is ! LT
9457 unless this is an scc insn. */
9458 return scc_p
? base_bit
+ 3 : base_bit
;
9461 return scc_p
? base_bit
+ 3 : base_bit
+ 1;
9468 /* Return the GOT register. */
9471 rs6000_got_register (rtx value ATTRIBUTE_UNUSED
)
9473 /* The second flow pass currently (June 1999) can't update
9474 regs_ever_live without disturbing other parts of the compiler, so
9475 update it here to make the prolog/epilogue code happy. */
9476 if (no_new_pseudos
&& ! regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
])
9477 regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
9479 current_function_uses_pic_offset_table
= 1;
9481 return pic_offset_table_rtx
;
9484 /* Function to init struct machine_function.
9485 This will be called, via a pointer variable,
9486 from push_function_context. */
9488 static struct machine_function
*
9489 rs6000_init_machine_status (void)
9491 return ggc_alloc_cleared (sizeof (machine_function
));
9494 /* These macros test for integers and extract the low-order bits. */
9496 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
9497 && GET_MODE (X) == VOIDmode)
9499 #define INT_LOWPART(X) \
9500 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
9506 unsigned long val
= INT_LOWPART (op
);
9508 /* If the high bit is zero, the value is the first 1 bit we find
9510 if ((val
& 0x80000000) == 0)
9512 gcc_assert (val
& 0xffffffff);
9515 while (((val
<<= 1) & 0x80000000) == 0)
9520 /* If the high bit is set and the low bit is not, or the mask is all
9521 1's, the value is zero. */
9522 if ((val
& 1) == 0 || (val
& 0xffffffff) == 0xffffffff)
9525 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9528 while (((val
>>= 1) & 1) != 0)
9538 unsigned long val
= INT_LOWPART (op
);
9540 /* If the low bit is zero, the value is the first 1 bit we find from
9544 gcc_assert (val
& 0xffffffff);
9547 while (((val
>>= 1) & 1) == 0)
9553 /* If the low bit is set and the high bit is not, or the mask is all
9554 1's, the value is 31. */
9555 if ((val
& 0x80000000) == 0 || (val
& 0xffffffff) == 0xffffffff)
9558 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9561 while (((val
<<= 1) & 0x80000000) != 0)
9567 /* Locate some local-dynamic symbol still in use by this function
9568 so that we can print its name in some tls_ld pattern. */
9571 rs6000_get_some_local_dynamic_name (void)
9575 if (cfun
->machine
->some_ld_name
)
9576 return cfun
->machine
->some_ld_name
;
9578 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
9580 && for_each_rtx (&PATTERN (insn
),
9581 rs6000_get_some_local_dynamic_name_1
, 0))
9582 return cfun
->machine
->some_ld_name
;
9587 /* Helper function for rs6000_get_some_local_dynamic_name. */
9590 rs6000_get_some_local_dynamic_name_1 (rtx
*px
, void *data ATTRIBUTE_UNUSED
)
9594 if (GET_CODE (x
) == SYMBOL_REF
)
9596 const char *str
= XSTR (x
, 0);
9597 if (SYMBOL_REF_TLS_MODEL (x
) == TLS_MODEL_LOCAL_DYNAMIC
)
9599 cfun
->machine
->some_ld_name
= str
;
9607 /* Write out a function code label. */
9610 rs6000_output_function_entry (FILE *file
, const char *fname
)
9612 if (fname
[0] != '.')
9614 switch (DEFAULT_ABI
)
9623 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "L.");
9632 RS6000_OUTPUT_BASENAME (file
, fname
);
9634 assemble_name (file
, fname
);
9637 /* Print an operand. Recognize special options, documented below. */
9640 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
9641 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
9643 #define SMALL_DATA_RELOC "sda21"
9644 #define SMALL_DATA_REG 0
9648 print_operand (FILE *file
, rtx x
, int code
)
9652 unsigned HOST_WIDE_INT uval
;
9657 /* Write out an instruction after the call which may be replaced
9658 with glue code by the loader. This depends on the AIX version. */
9659 asm_fprintf (file
, RS6000_CALL_GLUE
);
9662 /* %a is output_address. */
9665 /* If X is a constant integer whose low-order 5 bits are zero,
9666 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
9667 in the AIX assembler where "sri" with a zero shift count
9668 writes a trash instruction. */
9669 if (GET_CODE (x
) == CONST_INT
&& (INTVAL (x
) & 31) == 0)
9676 /* If constant, low-order 16 bits of constant, unsigned.
9677 Otherwise, write normally. */
9679 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 0xffff);
9681 print_operand (file
, x
, 0);
9685 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
9686 for 64-bit mask direction. */
9687 putc (((INT_LOWPART (x
) & 1) == 0 ? 'r' : 'l'), file
);
9690 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
9694 /* X is a CR register. Print the number of the GT bit of the CR. */
9695 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
9696 output_operand_lossage ("invalid %%E value");
9698 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
) + 1);
9702 /* Like 'J' but get to the EQ bit. */
9703 gcc_assert (GET_CODE (x
) == REG
);
9705 /* Bit 1 is EQ bit. */
9706 i
= 4 * (REGNO (x
) - CR0_REGNO
) + 2;
9708 fprintf (file
, "%d", i
);
9712 /* X is a CR register. Print the number of the EQ bit of the CR */
9713 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
9714 output_operand_lossage ("invalid %%E value");
9716 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
) + 2);
9720 /* X is a CR register. Print the shift count needed to move it
9721 to the high-order four bits. */
9722 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
9723 output_operand_lossage ("invalid %%f value");
9725 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
));
9729 /* Similar, but print the count for the rotate in the opposite
9731 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
9732 output_operand_lossage ("invalid %%F value");
9734 fprintf (file
, "%d", 32 - 4 * (REGNO (x
) - CR0_REGNO
));
9738 /* X is a constant integer. If it is negative, print "m",
9739 otherwise print "z". This is to make an aze or ame insn. */
9740 if (GET_CODE (x
) != CONST_INT
)
9741 output_operand_lossage ("invalid %%G value");
9742 else if (INTVAL (x
) >= 0)
9749 /* If constant, output low-order five bits. Otherwise, write
9752 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 31);
9754 print_operand (file
, x
, 0);
9758 /* If constant, output low-order six bits. Otherwise, write
9761 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 63);
9763 print_operand (file
, x
, 0);
9767 /* Print `i' if this is a constant, else nothing. */
9773 /* Write the bit number in CCR for jump. */
9776 output_operand_lossage ("invalid %%j code");
9778 fprintf (file
, "%d", i
);
9782 /* Similar, but add one for shift count in rlinm for scc and pass
9783 scc flag to `ccr_bit'. */
9786 output_operand_lossage ("invalid %%J code");
9788 /* If we want bit 31, write a shift count of zero, not 32. */
9789 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
9793 /* X must be a constant. Write the 1's complement of the
9796 output_operand_lossage ("invalid %%k value");
9798 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ~ INT_LOWPART (x
));
9802 /* X must be a symbolic constant on ELF. Write an
9803 expression suitable for an 'addi' that adds in the low 16
9805 if (GET_CODE (x
) != CONST
)
9807 print_operand_address (file
, x
);
9812 if (GET_CODE (XEXP (x
, 0)) != PLUS
9813 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
9814 && GET_CODE (XEXP (XEXP (x
, 0), 0)) != LABEL_REF
)
9815 || GET_CODE (XEXP (XEXP (x
, 0), 1)) != CONST_INT
)
9816 output_operand_lossage ("invalid %%K value");
9817 print_operand_address (file
, XEXP (XEXP (x
, 0), 0));
9819 /* For GNU as, there must be a non-alphanumeric character
9820 between 'l' and the number. The '-' is added by
9821 print_operand() already. */
9822 if (INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0)
9824 print_operand (file
, XEXP (XEXP (x
, 0), 1), 0);
9828 /* %l is output_asm_label. */
9831 /* Write second word of DImode or DFmode reference. Works on register
9832 or non-indexed memory only. */
9833 if (GET_CODE (x
) == REG
)
9834 fputs (reg_names
[REGNO (x
) + 1], file
);
9835 else if (GET_CODE (x
) == MEM
)
9837 /* Handle possible auto-increment. Since it is pre-increment and
9838 we have already done it, we can just use an offset of word. */
9839 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
9840 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
9841 output_address (plus_constant (XEXP (XEXP (x
, 0), 0),
9844 output_address (XEXP (adjust_address_nv (x
, SImode
,
9848 if (small_data_operand (x
, GET_MODE (x
)))
9849 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
9850 reg_names
[SMALL_DATA_REG
]);
9855 /* MB value for a mask operand. */
9856 if (! mask_operand (x
, SImode
))
9857 output_operand_lossage ("invalid %%m value");
9859 fprintf (file
, "%d", extract_MB (x
));
9863 /* ME value for a mask operand. */
9864 if (! mask_operand (x
, SImode
))
9865 output_operand_lossage ("invalid %%M value");
9867 fprintf (file
, "%d", extract_ME (x
));
9870 /* %n outputs the negative of its operand. */
9873 /* Write the number of elements in the vector times 4. */
9874 if (GET_CODE (x
) != PARALLEL
)
9875 output_operand_lossage ("invalid %%N value");
9877 fprintf (file
, "%d", XVECLEN (x
, 0) * 4);
9881 /* Similar, but subtract 1 first. */
9882 if (GET_CODE (x
) != PARALLEL
)
9883 output_operand_lossage ("invalid %%O value");
9885 fprintf (file
, "%d", (XVECLEN (x
, 0) - 1) * 4);
9889 /* X is a CONST_INT that is a power of two. Output the logarithm. */
9891 || INT_LOWPART (x
) < 0
9892 || (i
= exact_log2 (INT_LOWPART (x
))) < 0)
9893 output_operand_lossage ("invalid %%p value");
9895 fprintf (file
, "%d", i
);
9899 /* The operand must be an indirect memory reference. The result
9900 is the register name. */
9901 if (GET_CODE (x
) != MEM
|| GET_CODE (XEXP (x
, 0)) != REG
9902 || REGNO (XEXP (x
, 0)) >= 32)
9903 output_operand_lossage ("invalid %%P value");
9905 fputs (reg_names
[REGNO (XEXP (x
, 0))], file
);
9909 /* This outputs the logical code corresponding to a boolean
9910 expression. The expression may have one or both operands
9911 negated (if one, only the first one). For condition register
9912 logical operations, it will also treat the negated
9913 CR codes as NOTs, but not handle NOTs of them. */
9915 const char *const *t
= 0;
9917 enum rtx_code code
= GET_CODE (x
);
9918 static const char * const tbl
[3][3] = {
9919 { "and", "andc", "nor" },
9920 { "or", "orc", "nand" },
9921 { "xor", "eqv", "xor" } };
9925 else if (code
== IOR
)
9927 else if (code
== XOR
)
9930 output_operand_lossage ("invalid %%q value");
9932 if (GET_CODE (XEXP (x
, 0)) != NOT
)
9936 if (GET_CODE (XEXP (x
, 1)) == NOT
)
9954 /* X is a CR register. Print the mask for `mtcrf'. */
9955 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
9956 output_operand_lossage ("invalid %%R value");
9958 fprintf (file
, "%d", 128 >> (REGNO (x
) - CR0_REGNO
));
9962 /* Low 5 bits of 32 - value */
9964 output_operand_lossage ("invalid %%s value");
9966 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, (32 - INT_LOWPART (x
)) & 31);
9970 /* PowerPC64 mask position. All 0's is excluded.
9971 CONST_INT 32-bit mask is considered sign-extended so any
9972 transition must occur within the CONST_INT, not on the boundary. */
9973 if (! mask_operand (x
, DImode
))
9974 output_operand_lossage ("invalid %%S value");
9976 uval
= INT_LOWPART (x
);
9978 if (uval
& 1) /* Clear Left */
9980 #if HOST_BITS_PER_WIDE_INT > 64
9981 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
9985 else /* Clear Right */
9988 #if HOST_BITS_PER_WIDE_INT > 64
9989 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
9995 gcc_assert (i
>= 0);
9996 fprintf (file
, "%d", i
);
10000 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
10001 gcc_assert (GET_CODE (x
) == REG
&& GET_MODE (x
) == CCmode
);
10003 /* Bit 3 is OV bit. */
10004 i
= 4 * (REGNO (x
) - CR0_REGNO
) + 3;
10006 /* If we want bit 31, write a shift count of zero, not 32. */
10007 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
10011 /* Print the symbolic name of a branch target register. */
10012 if (GET_CODE (x
) != REG
|| (REGNO (x
) != LINK_REGISTER_REGNUM
10013 && REGNO (x
) != COUNT_REGISTER_REGNUM
))
10014 output_operand_lossage ("invalid %%T value");
10015 else if (REGNO (x
) == LINK_REGISTER_REGNUM
)
10016 fputs (TARGET_NEW_MNEMONICS
? "lr" : "r", file
);
10018 fputs ("ctr", file
);
10022 /* High-order 16 bits of constant for use in unsigned operand. */
10024 output_operand_lossage ("invalid %%u value");
10026 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
10027 (INT_LOWPART (x
) >> 16) & 0xffff);
10031 /* High-order 16 bits of constant for use in signed operand. */
10033 output_operand_lossage ("invalid %%v value");
10035 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
10036 (INT_LOWPART (x
) >> 16) & 0xffff);
10040 /* Print `u' if this has an auto-increment or auto-decrement. */
10041 if (GET_CODE (x
) == MEM
10042 && (GET_CODE (XEXP (x
, 0)) == PRE_INC
10043 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
))
10048 /* Print the trap code for this operand. */
10049 switch (GET_CODE (x
))
10052 fputs ("eq", file
); /* 4 */
10055 fputs ("ne", file
); /* 24 */
10058 fputs ("lt", file
); /* 16 */
10061 fputs ("le", file
); /* 20 */
10064 fputs ("gt", file
); /* 8 */
10067 fputs ("ge", file
); /* 12 */
10070 fputs ("llt", file
); /* 2 */
10073 fputs ("lle", file
); /* 6 */
10076 fputs ("lgt", file
); /* 1 */
10079 fputs ("lge", file
); /* 5 */
10082 gcc_unreachable ();
10087 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
10090 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
10091 ((INT_LOWPART (x
) & 0xffff) ^ 0x8000) - 0x8000);
10093 print_operand (file
, x
, 0);
10097 /* MB value for a PowerPC64 rldic operand. */
10098 val
= (GET_CODE (x
) == CONST_INT
10099 ? INTVAL (x
) : CONST_DOUBLE_HIGH (x
));
10104 for (i
= 0; i
< HOST_BITS_PER_WIDE_INT
; i
++)
10105 if ((val
<<= 1) < 0)
10108 #if HOST_BITS_PER_WIDE_INT == 32
10109 if (GET_CODE (x
) == CONST_INT
&& i
>= 0)
10110 i
+= 32; /* zero-extend high-part was all 0's */
10111 else if (GET_CODE (x
) == CONST_DOUBLE
&& i
== 32)
10113 val
= CONST_DOUBLE_LOW (x
);
10119 for ( ; i
< 64; i
++)
10120 if ((val
<<= 1) < 0)
10125 fprintf (file
, "%d", i
+ 1);
10129 if (GET_CODE (x
) == MEM
10130 && legitimate_indexed_address_p (XEXP (x
, 0), 0))
10135 /* Like 'L', for third word of TImode */
10136 if (GET_CODE (x
) == REG
)
10137 fputs (reg_names
[REGNO (x
) + 2], file
);
10138 else if (GET_CODE (x
) == MEM
)
10140 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
10141 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
10142 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 8));
10144 output_address (XEXP (adjust_address_nv (x
, SImode
, 8), 0));
10145 if (small_data_operand (x
, GET_MODE (x
)))
10146 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
10147 reg_names
[SMALL_DATA_REG
]);
10152 /* X is a SYMBOL_REF. Write out the name preceded by a
10153 period and without any trailing data in brackets. Used for function
10154 names. If we are configured for System V (or the embedded ABI) on
10155 the PowerPC, do not emit the period, since those systems do not use
10156 TOCs and the like. */
10157 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
10159 /* Mark the decl as referenced so that cgraph will output the
10161 if (SYMBOL_REF_DECL (x
))
10162 mark_decl_referenced (SYMBOL_REF_DECL (x
));
10164 /* For macho, check to see if we need a stub. */
10167 const char *name
= XSTR (x
, 0);
10169 if (MACHOPIC_INDIRECT
10170 && machopic_classify_symbol (x
) == MACHOPIC_UNDEFINED_FUNCTION
)
10171 name
= machopic_indirection_name (x
, /*stub_p=*/true);
10173 assemble_name (file
, name
);
10175 else if (!DOT_SYMBOLS
)
10176 assemble_name (file
, XSTR (x
, 0));
10178 rs6000_output_function_entry (file
, XSTR (x
, 0));
10182 /* Like 'L', for last word of TImode. */
10183 if (GET_CODE (x
) == REG
)
10184 fputs (reg_names
[REGNO (x
) + 3], file
);
10185 else if (GET_CODE (x
) == MEM
)
10187 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
10188 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
10189 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 12));
10191 output_address (XEXP (adjust_address_nv (x
, SImode
, 12), 0));
10192 if (small_data_operand (x
, GET_MODE (x
)))
10193 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
10194 reg_names
[SMALL_DATA_REG
]);
10198 /* Print AltiVec or SPE memory operand. */
10203 gcc_assert (GET_CODE (x
) == MEM
);
10209 /* Handle [reg]. */
10210 if (GET_CODE (tmp
) == REG
)
10212 fprintf (file
, "0(%s)", reg_names
[REGNO (tmp
)]);
10215 /* Handle [reg+UIMM]. */
10216 else if (GET_CODE (tmp
) == PLUS
&&
10217 GET_CODE (XEXP (tmp
, 1)) == CONST_INT
)
10221 gcc_assert (GET_CODE (XEXP (tmp
, 0)) == REG
);
10223 x
= INTVAL (XEXP (tmp
, 1));
10224 fprintf (file
, "%d(%s)", x
, reg_names
[REGNO (XEXP (tmp
, 0))]);
10228 /* Fall through. Must be [reg+reg]. */
10231 && GET_CODE (tmp
) == AND
10232 && GET_CODE (XEXP (tmp
, 1)) == CONST_INT
10233 && INTVAL (XEXP (tmp
, 1)) == -16)
10234 tmp
= XEXP (tmp
, 0);
10235 if (GET_CODE (tmp
) == REG
)
10236 fprintf (file
, "0,%s", reg_names
[REGNO (tmp
)]);
10239 gcc_assert (GET_CODE (tmp
) == PLUS
10240 && GET_CODE (XEXP (tmp
, 1)) == REG
);
10242 if (REGNO (XEXP (tmp
, 0)) == 0)
10243 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 1)) ],
10244 reg_names
[ REGNO (XEXP (tmp
, 0)) ]);
10246 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 0)) ],
10247 reg_names
[ REGNO (XEXP (tmp
, 1)) ]);
10253 if (GET_CODE (x
) == REG
)
10254 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
10255 else if (GET_CODE (x
) == MEM
)
10257 /* We need to handle PRE_INC and PRE_DEC here, since we need to
10258 know the width from the mode. */
10259 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
)
10260 fprintf (file
, "%d(%s)", GET_MODE_SIZE (GET_MODE (x
)),
10261 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
10262 else if (GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
10263 fprintf (file
, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x
)),
10264 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
10266 output_address (XEXP (x
, 0));
10269 output_addr_const (file
, x
);
10273 assemble_name (file
, rs6000_get_some_local_dynamic_name ());
10277 output_operand_lossage ("invalid %%xn code");
10281 /* Print the address of an operand. */
10284 print_operand_address (FILE *file
, rtx x
)
10286 if (GET_CODE (x
) == REG
)
10287 fprintf (file
, "0(%s)", reg_names
[ REGNO (x
) ]);
10288 else if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
10289 || GET_CODE (x
) == LABEL_REF
)
10291 output_addr_const (file
, x
);
10292 if (small_data_operand (x
, GET_MODE (x
)))
10293 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
10294 reg_names
[SMALL_DATA_REG
]);
10296 gcc_assert (!TARGET_TOC
);
10298 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == REG
)
10300 if (REGNO (XEXP (x
, 0)) == 0)
10301 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 1)) ],
10302 reg_names
[ REGNO (XEXP (x
, 0)) ]);
10304 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 0)) ],
10305 reg_names
[ REGNO (XEXP (x
, 1)) ]);
10307 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
10308 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
"(%s)",
10309 INTVAL (XEXP (x
, 1)), reg_names
[ REGNO (XEXP (x
, 0)) ]);
10311 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
10312 && CONSTANT_P (XEXP (x
, 1)))
10314 output_addr_const (file
, XEXP (x
, 1));
10315 fprintf (file
, "@l(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
10319 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
10320 && CONSTANT_P (XEXP (x
, 1)))
10322 fprintf (file
, "lo16(");
10323 output_addr_const (file
, XEXP (x
, 1));
10324 fprintf (file
, ")(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
10327 else if (legitimate_constant_pool_address_p (x
))
10329 if (TARGET_AIX
&& (!TARGET_ELF
|| !TARGET_MINIMAL_TOC
))
10331 rtx contains_minus
= XEXP (x
, 1);
10335 /* Find the (minus (sym) (toc)) buried in X, and temporarily
10336 turn it into (sym) for output_addr_const. */
10337 while (GET_CODE (XEXP (contains_minus
, 0)) != MINUS
)
10338 contains_minus
= XEXP (contains_minus
, 0);
10340 minus
= XEXP (contains_minus
, 0);
10341 symref
= XEXP (minus
, 0);
10342 XEXP (contains_minus
, 0) = symref
;
10347 name
= XSTR (symref
, 0);
10348 newname
= alloca (strlen (name
) + sizeof ("@toc"));
10349 strcpy (newname
, name
);
10350 strcat (newname
, "@toc");
10351 XSTR (symref
, 0) = newname
;
10353 output_addr_const (file
, XEXP (x
, 1));
10355 XSTR (symref
, 0) = name
;
10356 XEXP (contains_minus
, 0) = minus
;
10359 output_addr_const (file
, XEXP (x
, 1));
10361 fprintf (file
, "(%s)", reg_names
[REGNO (XEXP (x
, 0))]);
10364 gcc_unreachable ();
10367 /* Target hook for assembling integer objects. The PowerPC version has
10368 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
10369 is defined. It also needs to handle DI-mode objects on 64-bit
10373 rs6000_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
10375 #ifdef RELOCATABLE_NEEDS_FIXUP
10376 /* Special handling for SI values. */
10377 if (RELOCATABLE_NEEDS_FIXUP
&& size
== 4 && aligned_p
)
10379 extern int in_toc_section (void);
10380 static int recurse
= 0;
10382 /* For -mrelocatable, we mark all addresses that need to be fixed up
10383 in the .fixup section. */
10384 if (TARGET_RELOCATABLE
10385 && !in_toc_section ()
10386 && !in_text_section ()
10387 && !in_unlikely_text_section ()
10389 && GET_CODE (x
) != CONST_INT
10390 && GET_CODE (x
) != CONST_DOUBLE
10396 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCP", fixuplabelno
);
10398 ASM_OUTPUT_LABEL (asm_out_file
, buf
);
10399 fprintf (asm_out_file
, "\t.long\t(");
10400 output_addr_const (asm_out_file
, x
);
10401 fprintf (asm_out_file
, ")@fixup\n");
10402 fprintf (asm_out_file
, "\t.section\t\".fixup\",\"aw\"\n");
10403 ASM_OUTPUT_ALIGN (asm_out_file
, 2);
10404 fprintf (asm_out_file
, "\t.long\t");
10405 assemble_name (asm_out_file
, buf
);
10406 fprintf (asm_out_file
, "\n\t.previous\n");
10410 /* Remove initial .'s to turn a -mcall-aixdesc function
10411 address into the address of the descriptor, not the function
10413 else if (GET_CODE (x
) == SYMBOL_REF
10414 && XSTR (x
, 0)[0] == '.'
10415 && DEFAULT_ABI
== ABI_AIX
)
10417 const char *name
= XSTR (x
, 0);
10418 while (*name
== '.')
10421 fprintf (asm_out_file
, "\t.long\t%s\n", name
);
10425 #endif /* RELOCATABLE_NEEDS_FIXUP */
10426 return default_assemble_integer (x
, size
, aligned_p
);
10429 #ifdef HAVE_GAS_HIDDEN
10430 /* Emit an assembler directive to set symbol visibility for DECL to
10431 VISIBILITY_TYPE. */
10434 rs6000_assemble_visibility (tree decl
, int vis
)
10436 /* Functions need to have their entry point symbol visibility set as
10437 well as their descriptor symbol visibility. */
10438 if (DEFAULT_ABI
== ABI_AIX
10440 && TREE_CODE (decl
) == FUNCTION_DECL
)
10442 static const char * const visibility_types
[] = {
10443 NULL
, "internal", "hidden", "protected"
10446 const char *name
, *type
;
10448 name
= ((* targetm
.strip_name_encoding
)
10449 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
))));
10450 type
= visibility_types
[vis
];
10452 fprintf (asm_out_file
, "\t.%s\t%s\n", type
, name
);
10453 fprintf (asm_out_file
, "\t.%s\t.%s\n", type
, name
);
10456 default_assemble_visibility (decl
, vis
);
10461 rs6000_reverse_condition (enum machine_mode mode
, enum rtx_code code
)
10463 /* Reversal of FP compares takes care -- an ordered compare
10464 becomes an unordered compare and vice versa. */
10465 if (mode
== CCFPmode
10466 && (!flag_finite_math_only
10467 || code
== UNLT
|| code
== UNLE
|| code
== UNGT
|| code
== UNGE
10468 || code
== UNEQ
|| code
== LTGT
))
10469 return reverse_condition_maybe_unordered (code
);
10471 return reverse_condition (code
);
10474 /* Generate a compare for CODE. Return a brand-new rtx that
10475 represents the result of the compare. */
10478 rs6000_generate_compare (enum rtx_code code
)
10480 enum machine_mode comp_mode
;
10481 rtx compare_result
;
10483 if (rs6000_compare_fp_p
)
10484 comp_mode
= CCFPmode
;
10485 else if (code
== GTU
|| code
== LTU
10486 || code
== GEU
|| code
== LEU
)
10487 comp_mode
= CCUNSmode
;
10488 else if ((code
== EQ
|| code
== NE
)
10489 && GET_CODE (rs6000_compare_op0
) == SUBREG
10490 && GET_CODE (rs6000_compare_op1
) == SUBREG
10491 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op0
)
10492 && SUBREG_PROMOTED_UNSIGNED_P (rs6000_compare_op1
))
10493 /* These are unsigned values, perhaps there will be a later
10494 ordering compare that can be shared with this one.
10495 Unfortunately we cannot detect the signedness of the operands
10496 for non-subregs. */
10497 comp_mode
= CCUNSmode
;
10499 comp_mode
= CCmode
;
10501 /* First, the compare. */
10502 compare_result
= gen_reg_rtx (comp_mode
);
10504 /* SPE FP compare instructions on the GPRs. Yuck! */
10505 if ((TARGET_E500
&& !TARGET_FPRS
&& TARGET_HARD_FLOAT
)
10506 && rs6000_compare_fp_p
)
10508 rtx cmp
, or_result
, compare_result2
;
10509 enum machine_mode op_mode
= GET_MODE (rs6000_compare_op0
);
10511 if (op_mode
== VOIDmode
)
10512 op_mode
= GET_MODE (rs6000_compare_op1
);
10514 /* Note: The E500 comparison instructions set the GT bit (x +
10515 1), on success. This explains the mess. */
10519 case EQ
: case UNEQ
: case NE
: case LTGT
:
10523 cmp
= flag_unsafe_math_optimizations
10524 ? gen_tstsfeq_gpr (compare_result
, rs6000_compare_op0
,
10525 rs6000_compare_op1
)
10526 : gen_cmpsfeq_gpr (compare_result
, rs6000_compare_op0
,
10527 rs6000_compare_op1
);
10531 cmp
= flag_unsafe_math_optimizations
10532 ? gen_tstdfeq_gpr (compare_result
, rs6000_compare_op0
,
10533 rs6000_compare_op1
)
10534 : gen_cmpdfeq_gpr (compare_result
, rs6000_compare_op0
,
10535 rs6000_compare_op1
);
10539 gcc_unreachable ();
10543 case GT
: case GTU
: case UNGT
: case UNGE
: case GE
: case GEU
:
10547 cmp
= flag_unsafe_math_optimizations
10548 ? gen_tstsfgt_gpr (compare_result
, rs6000_compare_op0
,
10549 rs6000_compare_op1
)
10550 : gen_cmpsfgt_gpr (compare_result
, rs6000_compare_op0
,
10551 rs6000_compare_op1
);
10555 cmp
= flag_unsafe_math_optimizations
10556 ? gen_tstdfgt_gpr (compare_result
, rs6000_compare_op0
,
10557 rs6000_compare_op1
)
10558 : gen_cmpdfgt_gpr (compare_result
, rs6000_compare_op0
,
10559 rs6000_compare_op1
);
10563 gcc_unreachable ();
10567 case LT
: case LTU
: case UNLT
: case UNLE
: case LE
: case LEU
:
10571 cmp
= flag_unsafe_math_optimizations
10572 ? gen_tstsflt_gpr (compare_result
, rs6000_compare_op0
,
10573 rs6000_compare_op1
)
10574 : gen_cmpsflt_gpr (compare_result
, rs6000_compare_op0
,
10575 rs6000_compare_op1
);
10579 cmp
= flag_unsafe_math_optimizations
10580 ? gen_tstdflt_gpr (compare_result
, rs6000_compare_op0
,
10581 rs6000_compare_op1
)
10582 : gen_cmpdflt_gpr (compare_result
, rs6000_compare_op0
,
10583 rs6000_compare_op1
);
10587 gcc_unreachable ();
10591 gcc_unreachable ();
10594 /* Synthesize LE and GE from LT/GT || EQ. */
10595 if (code
== LE
|| code
== GE
|| code
== LEU
|| code
== GEU
)
10601 case LE
: code
= LT
; break;
10602 case GE
: code
= GT
; break;
10603 case LEU
: code
= LT
; break;
10604 case GEU
: code
= GT
; break;
10605 default: gcc_unreachable ();
10608 compare_result2
= gen_reg_rtx (CCFPmode
);
10614 cmp
= flag_unsafe_math_optimizations
10615 ? gen_tstsfeq_gpr (compare_result2
, rs6000_compare_op0
,
10616 rs6000_compare_op1
)
10617 : gen_cmpsfeq_gpr (compare_result2
, rs6000_compare_op0
,
10618 rs6000_compare_op1
);
10622 cmp
= flag_unsafe_math_optimizations
10623 ? gen_tstdfeq_gpr (compare_result2
, rs6000_compare_op0
,
10624 rs6000_compare_op1
)
10625 : gen_cmpdfeq_gpr (compare_result2
, rs6000_compare_op0
,
10626 rs6000_compare_op1
);
10630 gcc_unreachable ();
10634 /* OR them together. */
10635 or_result
= gen_reg_rtx (CCFPmode
);
10636 cmp
= gen_e500_cr_ior_compare (or_result
, compare_result
,
10638 compare_result
= or_result
;
10643 if (code
== NE
|| code
== LTGT
)
10653 /* Generate XLC-compatible TFmode compare as PARALLEL with extra
10654 CLOBBERs to match cmptf_internal2 pattern. */
10655 if (comp_mode
== CCFPmode
&& TARGET_XL_COMPAT
10656 && GET_MODE (rs6000_compare_op0
) == TFmode
10657 && (DEFAULT_ABI
== ABI_AIX
|| DEFAULT_ABI
== ABI_DARWIN
)
10658 && TARGET_HARD_FLOAT
&& TARGET_FPRS
&& TARGET_LONG_DOUBLE_128
)
10659 emit_insn (gen_rtx_PARALLEL (VOIDmode
,
10661 gen_rtx_SET (VOIDmode
,
10663 gen_rtx_COMPARE (comp_mode
,
10664 rs6000_compare_op0
,
10665 rs6000_compare_op1
)),
10666 gen_rtx_CLOBBER (VOIDmode
, gen_rtx_SCRATCH (DFmode
)),
10667 gen_rtx_CLOBBER (VOIDmode
, gen_rtx_SCRATCH (DFmode
)),
10668 gen_rtx_CLOBBER (VOIDmode
, gen_rtx_SCRATCH (DFmode
)),
10669 gen_rtx_CLOBBER (VOIDmode
, gen_rtx_SCRATCH (DFmode
)),
10670 gen_rtx_CLOBBER (VOIDmode
, gen_rtx_SCRATCH (DFmode
)),
10671 gen_rtx_CLOBBER (VOIDmode
, gen_rtx_SCRATCH (DFmode
)),
10672 gen_rtx_CLOBBER (VOIDmode
, gen_rtx_SCRATCH (DFmode
)),
10673 gen_rtx_CLOBBER (VOIDmode
, gen_rtx_SCRATCH (DFmode
)))));
10674 else if (GET_CODE (rs6000_compare_op1
) == UNSPEC
10675 && XINT (rs6000_compare_op1
, 1) == UNSPEC_SP_TEST
)
10677 rtx op1
= XVECEXP (rs6000_compare_op1
, 0, 0);
10678 comp_mode
= CCEQmode
;
10679 compare_result
= gen_reg_rtx (CCEQmode
);
10681 emit_insn (gen_stack_protect_testdi (compare_result
,
10682 rs6000_compare_op0
, op1
));
10684 emit_insn (gen_stack_protect_testsi (compare_result
,
10685 rs6000_compare_op0
, op1
));
10688 emit_insn (gen_rtx_SET (VOIDmode
, compare_result
,
10689 gen_rtx_COMPARE (comp_mode
,
10690 rs6000_compare_op0
,
10691 rs6000_compare_op1
)));
10694 /* Some kinds of FP comparisons need an OR operation;
10695 under flag_finite_math_only we don't bother. */
10696 if (rs6000_compare_fp_p
10697 && !flag_finite_math_only
10698 && !(TARGET_HARD_FLOAT
&& TARGET_E500
&& !TARGET_FPRS
)
10699 && (code
== LE
|| code
== GE
10700 || code
== UNEQ
|| code
== LTGT
10701 || code
== UNGT
|| code
== UNLT
))
10703 enum rtx_code or1
, or2
;
10704 rtx or1_rtx
, or2_rtx
, compare2_rtx
;
10705 rtx or_result
= gen_reg_rtx (CCEQmode
);
10709 case LE
: or1
= LT
; or2
= EQ
; break;
10710 case GE
: or1
= GT
; or2
= EQ
; break;
10711 case UNEQ
: or1
= UNORDERED
; or2
= EQ
; break;
10712 case LTGT
: or1
= LT
; or2
= GT
; break;
10713 case UNGT
: or1
= UNORDERED
; or2
= GT
; break;
10714 case UNLT
: or1
= UNORDERED
; or2
= LT
; break;
10715 default: gcc_unreachable ();
10717 validate_condition_mode (or1
, comp_mode
);
10718 validate_condition_mode (or2
, comp_mode
);
10719 or1_rtx
= gen_rtx_fmt_ee (or1
, SImode
, compare_result
, const0_rtx
);
10720 or2_rtx
= gen_rtx_fmt_ee (or2
, SImode
, compare_result
, const0_rtx
);
10721 compare2_rtx
= gen_rtx_COMPARE (CCEQmode
,
10722 gen_rtx_IOR (SImode
, or1_rtx
, or2_rtx
),
10724 emit_insn (gen_rtx_SET (VOIDmode
, or_result
, compare2_rtx
));
10726 compare_result
= or_result
;
10730 validate_condition_mode (code
, GET_MODE (compare_result
));
10732 return gen_rtx_fmt_ee (code
, VOIDmode
, compare_result
, const0_rtx
);
10736 /* Emit the RTL for an sCOND pattern. */
10739 rs6000_emit_sCOND (enum rtx_code code
, rtx result
)
10742 enum machine_mode op_mode
;
10743 enum rtx_code cond_code
;
10745 condition_rtx
= rs6000_generate_compare (code
);
10746 cond_code
= GET_CODE (condition_rtx
);
10748 if (TARGET_E500
&& rs6000_compare_fp_p
10749 && !TARGET_FPRS
&& TARGET_HARD_FLOAT
)
10753 PUT_MODE (condition_rtx
, SImode
);
10754 t
= XEXP (condition_rtx
, 0);
10756 gcc_assert (cond_code
== NE
|| cond_code
== EQ
);
10758 if (cond_code
== NE
)
10759 emit_insn (gen_e500_flip_gt_bit (t
, t
));
10761 emit_insn (gen_move_from_CR_gt_bit (result
, t
));
10765 if (cond_code
== NE
10766 || cond_code
== GE
|| cond_code
== LE
10767 || cond_code
== GEU
|| cond_code
== LEU
10768 || cond_code
== ORDERED
|| cond_code
== UNGE
|| cond_code
== UNLE
)
10770 rtx not_result
= gen_reg_rtx (CCEQmode
);
10771 rtx not_op
, rev_cond_rtx
;
10772 enum machine_mode cc_mode
;
10774 cc_mode
= GET_MODE (XEXP (condition_rtx
, 0));
10776 rev_cond_rtx
= gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode
, cond_code
),
10777 SImode
, XEXP (condition_rtx
, 0), const0_rtx
);
10778 not_op
= gen_rtx_COMPARE (CCEQmode
, rev_cond_rtx
, const0_rtx
);
10779 emit_insn (gen_rtx_SET (VOIDmode
, not_result
, not_op
));
10780 condition_rtx
= gen_rtx_EQ (VOIDmode
, not_result
, const0_rtx
);
10783 op_mode
= GET_MODE (rs6000_compare_op0
);
10784 if (op_mode
== VOIDmode
)
10785 op_mode
= GET_MODE (rs6000_compare_op1
);
10787 if (TARGET_POWERPC64
&& (op_mode
== DImode
|| rs6000_compare_fp_p
))
10789 PUT_MODE (condition_rtx
, DImode
);
10790 convert_move (result
, condition_rtx
, 0);
10794 PUT_MODE (condition_rtx
, SImode
);
10795 emit_insn (gen_rtx_SET (VOIDmode
, result
, condition_rtx
));
10799 /* Emit a branch of kind CODE to location LOC. */
10802 rs6000_emit_cbranch (enum rtx_code code
, rtx loc
)
10804 rtx condition_rtx
, loc_ref
;
10806 condition_rtx
= rs6000_generate_compare (code
);
10807 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
10808 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
10809 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
10810 loc_ref
, pc_rtx
)));
10813 /* Return the string to output a conditional branch to LABEL, which is
10814 the operand number of the label, or -1 if the branch is really a
10815 conditional return.
10817 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
10818 condition code register and its mode specifies what kind of
10819 comparison we made.
10821 REVERSED is nonzero if we should reverse the sense of the comparison.
10823 INSN is the insn. */
10826 output_cbranch (rtx op
, const char *label
, int reversed
, rtx insn
)
10828 static char string
[64];
10829 enum rtx_code code
= GET_CODE (op
);
10830 rtx cc_reg
= XEXP (op
, 0);
10831 enum machine_mode mode
= GET_MODE (cc_reg
);
10832 int cc_regno
= REGNO (cc_reg
) - CR0_REGNO
;
10833 int need_longbranch
= label
!= NULL
&& get_attr_length (insn
) == 8;
10834 int really_reversed
= reversed
^ need_longbranch
;
10840 validate_condition_mode (code
, mode
);
10842 /* Work out which way this really branches. We could use
10843 reverse_condition_maybe_unordered here always but this
10844 makes the resulting assembler clearer. */
10845 if (really_reversed
)
10847 /* Reversal of FP compares takes care -- an ordered compare
10848 becomes an unordered compare and vice versa. */
10849 if (mode
== CCFPmode
)
10850 code
= reverse_condition_maybe_unordered (code
);
10852 code
= reverse_condition (code
);
10855 if ((TARGET_E500
&& !TARGET_FPRS
&& TARGET_HARD_FLOAT
) && mode
== CCFPmode
)
10857 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
10862 /* Opposite of GT. */
10871 gcc_unreachable ();
10877 /* Not all of these are actually distinct opcodes, but
10878 we distinguish them for clarity of the resulting assembler. */
10879 case NE
: case LTGT
:
10880 ccode
= "ne"; break;
10881 case EQ
: case UNEQ
:
10882 ccode
= "eq"; break;
10884 ccode
= "ge"; break;
10885 case GT
: case GTU
: case UNGT
:
10886 ccode
= "gt"; break;
10888 ccode
= "le"; break;
10889 case LT
: case LTU
: case UNLT
:
10890 ccode
= "lt"; break;
10891 case UNORDERED
: ccode
= "un"; break;
10892 case ORDERED
: ccode
= "nu"; break;
10893 case UNGE
: ccode
= "nl"; break;
10894 case UNLE
: ccode
= "ng"; break;
10896 gcc_unreachable ();
10899 /* Maybe we have a guess as to how likely the branch is.
10900 The old mnemonics don't have a way to specify this information. */
10902 note
= find_reg_note (insn
, REG_BR_PROB
, NULL_RTX
);
10903 if (note
!= NULL_RTX
)
10905 /* PROB is the difference from 50%. */
10906 int prob
= INTVAL (XEXP (note
, 0)) - REG_BR_PROB_BASE
/ 2;
10908 /* Only hint for highly probable/improbable branches on newer
10909 cpus as static prediction overrides processor dynamic
10910 prediction. For older cpus we may as well always hint, but
10911 assume not taken for branches that are very close to 50% as a
10912 mispredicted taken branch is more expensive than a
10913 mispredicted not-taken branch. */
10914 if (rs6000_always_hint
10915 || abs (prob
) > REG_BR_PROB_BASE
/ 100 * 48)
10917 if (abs (prob
) > REG_BR_PROB_BASE
/ 20
10918 && ((prob
> 0) ^ need_longbranch
))
10926 s
+= sprintf (s
, "{b%sr|b%slr%s} ", ccode
, ccode
, pred
);
10928 s
+= sprintf (s
, "{b%s|b%s%s} ", ccode
, ccode
, pred
);
10930 /* We need to escape any '%' characters in the reg_names string.
10931 Assume they'd only be the first character.... */
10932 if (reg_names
[cc_regno
+ CR0_REGNO
][0] == '%')
10934 s
+= sprintf (s
, "%s", reg_names
[cc_regno
+ CR0_REGNO
]);
10938 /* If the branch distance was too far, we may have to use an
10939 unconditional branch to go the distance. */
10940 if (need_longbranch
)
10941 s
+= sprintf (s
, ",$+8\n\tb %s", label
);
10943 s
+= sprintf (s
, ",%s", label
);
10949 /* Return the string to flip the GT bit on a CR. */
10951 output_e500_flip_gt_bit (rtx dst
, rtx src
)
10953 static char string
[64];
10956 gcc_assert (GET_CODE (dst
) == REG
&& CR_REGNO_P (REGNO (dst
))
10957 && GET_CODE (src
) == REG
&& CR_REGNO_P (REGNO (src
)));
10960 a
= 4 * (REGNO (dst
) - CR0_REGNO
) + 1;
10961 b
= 4 * (REGNO (src
) - CR0_REGNO
) + 1;
10963 sprintf (string
, "crnot %d,%d", a
, b
);
10967 /* Return insn index for the vector compare instruction for given CODE,
10968 and DEST_MODE, OP_MODE. Return INSN_NOT_AVAILABLE if valid insn is
10972 get_vec_cmp_insn (enum rtx_code code
,
10973 enum machine_mode dest_mode
,
10974 enum machine_mode op_mode
)
10976 if (!TARGET_ALTIVEC
)
10977 return INSN_NOT_AVAILABLE
;
10982 if (dest_mode
== V16QImode
&& op_mode
== V16QImode
)
10983 return UNSPEC_VCMPEQUB
;
10984 if (dest_mode
== V8HImode
&& op_mode
== V8HImode
)
10985 return UNSPEC_VCMPEQUH
;
10986 if (dest_mode
== V4SImode
&& op_mode
== V4SImode
)
10987 return UNSPEC_VCMPEQUW
;
10988 if (dest_mode
== V4SImode
&& op_mode
== V4SFmode
)
10989 return UNSPEC_VCMPEQFP
;
10992 if (dest_mode
== V4SImode
&& op_mode
== V4SFmode
)
10993 return UNSPEC_VCMPGEFP
;
10995 if (dest_mode
== V16QImode
&& op_mode
== V16QImode
)
10996 return UNSPEC_VCMPGTSB
;
10997 if (dest_mode
== V8HImode
&& op_mode
== V8HImode
)
10998 return UNSPEC_VCMPGTSH
;
10999 if (dest_mode
== V4SImode
&& op_mode
== V4SImode
)
11000 return UNSPEC_VCMPGTSW
;
11001 if (dest_mode
== V4SImode
&& op_mode
== V4SFmode
)
11002 return UNSPEC_VCMPGTFP
;
11005 if (dest_mode
== V16QImode
&& op_mode
== V16QImode
)
11006 return UNSPEC_VCMPGTUB
;
11007 if (dest_mode
== V8HImode
&& op_mode
== V8HImode
)
11008 return UNSPEC_VCMPGTUH
;
11009 if (dest_mode
== V4SImode
&& op_mode
== V4SImode
)
11010 return UNSPEC_VCMPGTUW
;
11015 return INSN_NOT_AVAILABLE
;
11018 /* Emit vector compare for operands OP0 and OP1 using code RCODE.
11019 DMODE is expected destination mode. This is a recursive function. */
11022 rs6000_emit_vector_compare (enum rtx_code rcode
,
11024 enum machine_mode dmode
)
11028 enum machine_mode dest_mode
;
11029 enum machine_mode op_mode
= GET_MODE (op1
);
11031 gcc_assert (TARGET_ALTIVEC
);
11032 gcc_assert (GET_MODE (op0
) == GET_MODE (op1
));
11034 /* Floating point vector compare instructions uses destination V4SImode.
11035 Move destination to appropriate mode later. */
11036 if (dmode
== V4SFmode
)
11037 dest_mode
= V4SImode
;
11041 mask
= gen_reg_rtx (dest_mode
);
11042 vec_cmp_insn
= get_vec_cmp_insn (rcode
, dest_mode
, op_mode
);
11044 if (vec_cmp_insn
== INSN_NOT_AVAILABLE
)
11046 bool swap_operands
= false;
11047 bool try_again
= false;
11052 swap_operands
= true;
11057 swap_operands
= true;
11061 /* Treat A != B as ~(A==B). */
11063 enum insn_code nor_code
;
11064 rtx eq_rtx
= rs6000_emit_vector_compare (EQ
, op0
, op1
,
11067 nor_code
= one_cmpl_optab
->handlers
[(int)dest_mode
].insn_code
;
11068 gcc_assert (nor_code
!= CODE_FOR_nothing
);
11069 emit_insn (GEN_FCN (nor_code
) (mask
, eq_rtx
));
11071 if (dmode
!= dest_mode
)
11073 rtx temp
= gen_reg_rtx (dest_mode
);
11074 convert_move (temp
, mask
, 0);
11084 /* Try GT/GTU/LT/LTU OR EQ */
11087 enum insn_code ior_code
;
11088 enum rtx_code new_code
;
11109 gcc_unreachable ();
11112 c_rtx
= rs6000_emit_vector_compare (new_code
,
11113 op0
, op1
, dest_mode
);
11114 eq_rtx
= rs6000_emit_vector_compare (EQ
, op0
, op1
,
11117 ior_code
= ior_optab
->handlers
[(int)dest_mode
].insn_code
;
11118 gcc_assert (ior_code
!= CODE_FOR_nothing
);
11119 emit_insn (GEN_FCN (ior_code
) (mask
, c_rtx
, eq_rtx
));
11120 if (dmode
!= dest_mode
)
11122 rtx temp
= gen_reg_rtx (dest_mode
);
11123 convert_move (temp
, mask
, 0);
11130 gcc_unreachable ();
11135 vec_cmp_insn
= get_vec_cmp_insn (rcode
, dest_mode
, op_mode
);
11136 /* You only get two chances. */
11137 gcc_assert (vec_cmp_insn
!= INSN_NOT_AVAILABLE
);
11149 emit_insn (gen_rtx_SET (VOIDmode
, mask
,
11150 gen_rtx_UNSPEC (dest_mode
,
11151 gen_rtvec (2, op0
, op1
),
11153 if (dmode
!= dest_mode
)
11155 rtx temp
= gen_reg_rtx (dest_mode
);
11156 convert_move (temp
, mask
, 0);
11162 /* Return vector select instruction for MODE. Return INSN_NOT_AVAILABLE, if
11163 valid insn doesn exist for given mode. */
11166 get_vsel_insn (enum machine_mode mode
)
11171 return UNSPEC_VSEL4SI
;
11174 return UNSPEC_VSEL4SF
;
11177 return UNSPEC_VSEL8HI
;
11180 return UNSPEC_VSEL16QI
;
11183 return INSN_NOT_AVAILABLE
;
11186 return INSN_NOT_AVAILABLE
;
11189 /* Emit vector select insn where DEST is destination using
11190 operands OP1, OP2 and MASK. */
11193 rs6000_emit_vector_select (rtx dest
, rtx op1
, rtx op2
, rtx mask
)
11196 enum machine_mode dest_mode
= GET_MODE (dest
);
11197 int vsel_insn_index
= get_vsel_insn (GET_MODE (dest
));
11199 temp
= gen_reg_rtx (dest_mode
);
11201 /* For each vector element, select op1 when mask is 1 otherwise
11203 t
= gen_rtx_SET (VOIDmode
, temp
,
11204 gen_rtx_UNSPEC (dest_mode
,
11205 gen_rtvec (3, op2
, op1
, mask
),
11208 emit_move_insn (dest
, temp
);
11212 /* Emit vector conditional expression.
11213 DEST is destination. OP1 and OP2 are two VEC_COND_EXPR operands.
11214 CC_OP0 and CC_OP1 are the two operands for the relation operation COND. */
11217 rs6000_emit_vector_cond_expr (rtx dest
, rtx op1
, rtx op2
,
11218 rtx cond
, rtx cc_op0
, rtx cc_op1
)
11220 enum machine_mode dest_mode
= GET_MODE (dest
);
11221 enum rtx_code rcode
= GET_CODE (cond
);
11224 if (!TARGET_ALTIVEC
)
11227 /* Get the vector mask for the given relational operations. */
11228 mask
= rs6000_emit_vector_compare (rcode
, cc_op0
, cc_op1
, dest_mode
);
11230 rs6000_emit_vector_select (dest
, op1
, op2
, mask
);
11235 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
11236 operands of the last comparison is nonzero/true, FALSE_COND if it
11237 is zero/false. Return 0 if the hardware has no such operation. */
11240 rs6000_emit_cmove (rtx dest
, rtx op
, rtx true_cond
, rtx false_cond
)
11242 enum rtx_code code
= GET_CODE (op
);
11243 rtx op0
= rs6000_compare_op0
;
11244 rtx op1
= rs6000_compare_op1
;
11245 REAL_VALUE_TYPE c1
;
11246 enum machine_mode compare_mode
= GET_MODE (op0
);
11247 enum machine_mode result_mode
= GET_MODE (dest
);
11249 bool is_against_zero
;
11251 /* These modes should always match. */
11252 if (GET_MODE (op1
) != compare_mode
11253 /* In the isel case however, we can use a compare immediate, so
11254 op1 may be a small constant. */
11255 && (!TARGET_ISEL
|| !short_cint_operand (op1
, VOIDmode
)))
11257 if (GET_MODE (true_cond
) != result_mode
)
11259 if (GET_MODE (false_cond
) != result_mode
)
11262 /* First, work out if the hardware can do this at all, or
11263 if it's too slow.... */
11264 if (! rs6000_compare_fp_p
)
11267 return rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
);
11270 else if (TARGET_E500
&& TARGET_HARD_FLOAT
&& !TARGET_FPRS
11271 && GET_MODE_CLASS (compare_mode
) == MODE_FLOAT
)
11274 is_against_zero
= op1
== CONST0_RTX (compare_mode
);
11276 /* A floating-point subtract might overflow, underflow, or produce
11277 an inexact result, thus changing the floating-point flags, so it
11278 can't be generated if we care about that. It's safe if one side
11279 of the construct is zero, since then no subtract will be
11281 if (GET_MODE_CLASS (compare_mode
) == MODE_FLOAT
11282 && flag_trapping_math
&& ! is_against_zero
)
11285 /* Eliminate half of the comparisons by switching operands, this
11286 makes the remaining code simpler. */
11287 if (code
== UNLT
|| code
== UNGT
|| code
== UNORDERED
|| code
== NE
11288 || code
== LTGT
|| code
== LT
|| code
== UNLE
)
11290 code
= reverse_condition_maybe_unordered (code
);
11292 true_cond
= false_cond
;
11296 /* UNEQ and LTGT take four instructions for a comparison with zero,
11297 it'll probably be faster to use a branch here too. */
11298 if (code
== UNEQ
&& HONOR_NANS (compare_mode
))
11301 if (GET_CODE (op1
) == CONST_DOUBLE
)
11302 REAL_VALUE_FROM_CONST_DOUBLE (c1
, op1
);
11304 /* We're going to try to implement comparisons by performing
11305 a subtract, then comparing against zero. Unfortunately,
11306 Inf - Inf is NaN which is not zero, and so if we don't
11307 know that the operand is finite and the comparison
11308 would treat EQ different to UNORDERED, we can't do it. */
11309 if (HONOR_INFINITIES (compare_mode
)
11310 && code
!= GT
&& code
!= UNGE
11311 && (GET_CODE (op1
) != CONST_DOUBLE
|| real_isinf (&c1
))
11312 /* Constructs of the form (a OP b ? a : b) are safe. */
11313 && ((! rtx_equal_p (op0
, false_cond
) && ! rtx_equal_p (op1
, false_cond
))
11314 || (! rtx_equal_p (op0
, true_cond
)
11315 && ! rtx_equal_p (op1
, true_cond
))))
11318 /* At this point we know we can use fsel. */
11320 /* Reduce the comparison to a comparison against zero. */
11321 if (! is_against_zero
)
11323 temp
= gen_reg_rtx (compare_mode
);
11324 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
11325 gen_rtx_MINUS (compare_mode
, op0
, op1
)));
11327 op1
= CONST0_RTX (compare_mode
);
11330 /* If we don't care about NaNs we can reduce some of the comparisons
11331 down to faster ones. */
11332 if (! HONOR_NANS (compare_mode
))
11338 true_cond
= false_cond
;
11351 /* Now, reduce everything down to a GE. */
11358 temp
= gen_reg_rtx (compare_mode
);
11359 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
11364 temp
= gen_reg_rtx (compare_mode
);
11365 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_ABS (compare_mode
, op0
)));
11370 temp
= gen_reg_rtx (compare_mode
);
11371 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
11372 gen_rtx_NEG (compare_mode
,
11373 gen_rtx_ABS (compare_mode
, op0
))));
11378 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
11379 temp
= gen_reg_rtx (result_mode
);
11380 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
11381 gen_rtx_IF_THEN_ELSE (result_mode
,
11382 gen_rtx_GE (VOIDmode
,
11384 true_cond
, false_cond
)));
11385 false_cond
= true_cond
;
11388 temp
= gen_reg_rtx (compare_mode
);
11389 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
11394 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
11395 temp
= gen_reg_rtx (result_mode
);
11396 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
11397 gen_rtx_IF_THEN_ELSE (result_mode
,
11398 gen_rtx_GE (VOIDmode
,
11400 true_cond
, false_cond
)));
11401 true_cond
= false_cond
;
11404 temp
= gen_reg_rtx (compare_mode
);
11405 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
11410 gcc_unreachable ();
11413 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
11414 gen_rtx_IF_THEN_ELSE (result_mode
,
11415 gen_rtx_GE (VOIDmode
,
11417 true_cond
, false_cond
)));
11421 /* Same as above, but for ints (isel). */
11424 rs6000_emit_int_cmove (rtx dest
, rtx op
, rtx true_cond
, rtx false_cond
)
11426 rtx condition_rtx
, cr
;
11428 /* All isel implementations thus far are 32-bits. */
11429 if (GET_MODE (rs6000_compare_op0
) != SImode
)
11432 /* We still have to do the compare, because isel doesn't do a
11433 compare, it just looks at the CRx bits set by a previous compare
11435 condition_rtx
= rs6000_generate_compare (GET_CODE (op
));
11436 cr
= XEXP (condition_rtx
, 0);
11438 if (GET_MODE (cr
) == CCmode
)
11439 emit_insn (gen_isel_signed (dest
, condition_rtx
,
11440 true_cond
, false_cond
, cr
));
11442 emit_insn (gen_isel_unsigned (dest
, condition_rtx
,
11443 true_cond
, false_cond
, cr
));
11449 output_isel (rtx
*operands
)
11451 enum rtx_code code
;
11453 code
= GET_CODE (operands
[1]);
11454 if (code
== GE
|| code
== GEU
|| code
== LE
|| code
== LEU
|| code
== NE
)
11456 PUT_CODE (operands
[1], reverse_condition (code
));
11457 return "isel %0,%3,%2,%j1";
11460 return "isel %0,%2,%3,%j1";
11464 rs6000_emit_minmax (rtx dest
, enum rtx_code code
, rtx op0
, rtx op1
)
11466 enum machine_mode mode
= GET_MODE (op0
);
11470 if (code
== SMAX
|| code
== SMIN
)
11475 if (code
== SMAX
|| code
== UMAX
)
11476 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
11477 op0
, op1
, mode
, 0);
11479 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
11480 op1
, op0
, mode
, 0);
11481 gcc_assert (target
);
11482 if (target
!= dest
)
11483 emit_move_insn (dest
, target
);
11486 /* Emit instructions to perform a load-reserved/store-conditional operation.
11487 The operation performed is an atomic
11488 (set M (CODE:MODE M OP))
11489 If not NULL, BEFORE is atomically set to M before the operation, and
11490 AFTER is set to M after the operation (that is, (CODE:MODE M OP)).
11491 If SYNC_P then a memory barrier is emitted before the operation.
11492 Either OP or M may be wrapped in a NOT operation. */
11495 rs6000_emit_sync (enum rtx_code code
, enum machine_mode mode
,
11496 rtx m
, rtx op
, rtx before_param
, rtx after_param
,
11499 enum machine_mode used_mode
;
11500 rtx the_op
, set_before
, set_after
, set_atomic
, cc_scratch
, before
, after
;
11503 HOST_WIDE_INT imask
= GET_MODE_MASK (mode
);
11504 rtx shift
= NULL_RTX
;
11507 emit_insn (gen_memory_barrier ());
11509 if (GET_CODE (m
) == NOT
)
11510 used_m
= XEXP (m
, 0);
11514 /* If this is smaller than SImode, we'll have to use SImode with
11516 if (mode
== QImode
|| mode
== HImode
)
11520 if (MEM_ALIGN (used_m
) >= 32)
11523 if (BYTES_BIG_ENDIAN
)
11524 ishift
= GET_MODE_BITSIZE (SImode
) - GET_MODE_BITSIZE (mode
);
11526 shift
= GEN_INT (ishift
);
11530 rtx addrSI
, aligned_addr
;
11531 int shift_mask
= mode
== QImode
? 0x18 : 0x10;
11533 addrSI
= force_reg (SImode
, gen_lowpart_common (SImode
,
11534 XEXP (used_m
, 0)));
11535 shift
= gen_reg_rtx (SImode
);
11537 emit_insn (gen_rlwinm (shift
, addrSI
, GEN_INT (3),
11538 GEN_INT (shift_mask
)));
11539 emit_insn (gen_xorsi3 (shift
, shift
, GEN_INT (shift_mask
)));
11541 aligned_addr
= expand_binop (Pmode
, and_optab
,
11543 GEN_INT (-4), NULL_RTX
,
11544 1, OPTAB_LIB_WIDEN
);
11545 used_m
= change_address (used_m
, SImode
, aligned_addr
);
11546 set_mem_align (used_m
, 32);
11547 /* It's safe to keep the old alias set of USED_M, because
11548 the operation is atomic and only affects the original
11550 if (GET_CODE (m
) == NOT
)
11551 m
= gen_rtx_NOT (SImode
, used_m
);
11556 if (GET_CODE (op
) == NOT
)
11558 oldop
= lowpart_subreg (SImode
, XEXP (op
, 0), mode
);
11559 oldop
= gen_rtx_NOT (SImode
, oldop
);
11562 oldop
= lowpart_subreg (SImode
, op
, mode
);
11567 newop
= expand_binop (SImode
, and_optab
,
11568 oldop
, GEN_INT (imask
), NULL_RTX
,
11569 1, OPTAB_LIB_WIDEN
);
11570 emit_insn (gen_ashlsi3 (newop
, newop
, shift
));
11574 newop
= expand_binop (SImode
, ior_optab
,
11575 oldop
, GEN_INT (~imask
), NULL_RTX
,
11576 1, OPTAB_LIB_WIDEN
);
11577 emit_insn (gen_rotlsi3 (newop
, newop
, shift
));
11584 newop
= expand_binop (SImode
, and_optab
,
11585 oldop
, GEN_INT (imask
), NULL_RTX
,
11586 1, OPTAB_LIB_WIDEN
);
11587 emit_insn (gen_ashlsi3 (newop
, newop
, shift
));
11589 mask
= gen_reg_rtx (SImode
);
11590 emit_move_insn (mask
, GEN_INT (imask
));
11591 emit_insn (gen_ashlsi3 (mask
, mask
, shift
));
11593 newop
= gen_rtx_AND (SImode
, gen_rtx_PLUS (SImode
, m
, newop
),
11595 newop
= gen_rtx_IOR (SImode
, newop
,
11596 gen_rtx_AND (SImode
,
11597 gen_rtx_NOT (SImode
, mask
),
11603 gcc_unreachable ();
11606 if (GET_CODE (m
) == NOT
)
11610 mask
= gen_reg_rtx (SImode
);
11611 emit_move_insn (mask
, GEN_INT (imask
));
11612 emit_insn (gen_ashlsi3 (mask
, mask
, shift
));
11614 xorm
= gen_rtx_XOR (SImode
, used_m
, mask
);
11615 /* Depending on the value of 'op', the XOR or the operation might
11616 be able to be simplified away. */
11617 newop
= simplify_gen_binary (code
, SImode
, xorm
, newop
);
11620 used_mode
= SImode
;
11621 before
= gen_reg_rtx (used_mode
);
11622 after
= gen_reg_rtx (used_mode
);
11627 before
= before_param
;
11628 after
= after_param
;
11630 if (before
== NULL_RTX
)
11631 before
= gen_reg_rtx (used_mode
);
11632 if (after
== NULL_RTX
)
11633 after
= gen_reg_rtx (used_mode
);
11636 if ((code
== PLUS
|| GET_CODE (m
) == NOT
) && used_mode
!= mode
)
11637 the_op
= op
; /* Computed above. */
11638 else if (GET_CODE (op
) == NOT
&& GET_CODE (m
) != NOT
)
11639 the_op
= gen_rtx_fmt_ee (code
, used_mode
, op
, m
);
11641 the_op
= gen_rtx_fmt_ee (code
, used_mode
, m
, op
);
11643 set_after
= gen_rtx_SET (VOIDmode
, after
, the_op
);
11644 set_before
= gen_rtx_SET (VOIDmode
, before
, used_m
);
11645 set_atomic
= gen_rtx_SET (VOIDmode
, used_m
,
11646 gen_rtx_UNSPEC_VOLATILE (used_mode
,
11647 gen_rtvec (1, the_op
),
11649 cc_scratch
= gen_rtx_CLOBBER (VOIDmode
, gen_rtx_SCRATCH (CCmode
));
11651 if (code
== PLUS
&& used_mode
!= mode
)
11652 vec
= gen_rtvec (5, set_after
, set_before
, set_atomic
, cc_scratch
,
11653 gen_rtx_CLOBBER (VOIDmode
, gen_rtx_SCRATCH (SImode
)));
11655 vec
= gen_rtvec (4, set_after
, set_before
, set_atomic
, cc_scratch
);
11656 emit_insn (gen_rtx_PARALLEL (VOIDmode
, vec
));
11658 /* Shift and mask the return values properly. */
11659 if (used_mode
!= mode
&& before_param
)
11661 emit_insn (gen_lshrsi3 (before
, before
, shift
));
11662 convert_move (before_param
, before
, 1);
11665 if (used_mode
!= mode
&& after_param
)
11667 emit_insn (gen_lshrsi3 (after
, after
, shift
));
11668 convert_move (after_param
, after
, 1);
11671 /* The previous sequence will end with a branch that's dependent on
11672 the conditional store, so placing an isync will ensure that no
11673 other instructions (especially, no load or store instructions)
11674 can start before the atomic operation completes. */
11676 emit_insn (gen_isync ());
11679 /* A subroutine of the atomic operation splitters. Jump to LABEL if
11680 COND is true. Mark the jump as unlikely to be taken. */
11683 emit_unlikely_jump (rtx cond
, rtx label
)
11685 rtx very_unlikely
= GEN_INT (REG_BR_PROB_BASE
/ 100 - 1);
11688 x
= gen_rtx_IF_THEN_ELSE (VOIDmode
, cond
, label
, pc_rtx
);
11689 x
= emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
, x
));
11690 REG_NOTES (x
) = gen_rtx_EXPR_LIST (REG_BR_PROB
, very_unlikely
, NULL_RTX
);
11693 /* A subroutine of the atomic operation splitters. Emit a load-locked
11694 instruction in MODE. */
11697 emit_load_locked (enum machine_mode mode
, rtx reg
, rtx mem
)
11699 rtx (*fn
) (rtx
, rtx
) = NULL
;
11700 if (mode
== SImode
)
11701 fn
= gen_load_locked_si
;
11702 else if (mode
== DImode
)
11703 fn
= gen_load_locked_di
;
11704 emit_insn (fn (reg
, mem
));
11707 /* A subroutine of the atomic operation splitters. Emit a store-conditional
11708 instruction in MODE. */
11711 emit_store_conditional (enum machine_mode mode
, rtx res
, rtx mem
, rtx val
)
11713 rtx (*fn
) (rtx
, rtx
, rtx
) = NULL
;
11714 if (mode
== SImode
)
11715 fn
= gen_store_conditional_si
;
11716 else if (mode
== DImode
)
11717 fn
= gen_store_conditional_di
;
11719 if (PPC405_ERRATUM77
)
11720 emit_insn (gen_memory_barrier ());
11722 emit_insn (fn (res
, mem
, val
));
11725 /* Expand an atomic compare and swap operation. MEM is the memory on which
11726 to operate. OLDVAL is the old value to be compared. NEWVAL is the new
11727 value to be stored. SCRATCH is a scratch GPR. */
11730 rs6000_split_compare_and_swap (rtx retval
, rtx mem
, rtx oldval
, rtx newval
,
11733 enum machine_mode mode
= GET_MODE (mem
);
11734 rtx label1
, label2
, x
, cond
= gen_rtx_REG (CCmode
, CR0_REGNO
);
11736 emit_insn (gen_memory_barrier ());
11738 label1
= gen_rtx_LABEL_REF (VOIDmode
, gen_label_rtx ());
11739 label2
= gen_rtx_LABEL_REF (VOIDmode
, gen_label_rtx ());
11740 emit_label (XEXP (label1
, 0));
11742 emit_load_locked (mode
, retval
, mem
);
11744 x
= gen_rtx_COMPARE (CCmode
, retval
, oldval
);
11745 emit_insn (gen_rtx_SET (VOIDmode
, cond
, x
));
11747 x
= gen_rtx_NE (VOIDmode
, cond
, const0_rtx
);
11748 emit_unlikely_jump (x
, label2
);
11750 emit_move_insn (scratch
, newval
);
11751 emit_store_conditional (mode
, cond
, mem
, scratch
);
11753 x
= gen_rtx_NE (VOIDmode
, cond
, const0_rtx
);
11754 emit_unlikely_jump (x
, label1
);
11756 emit_insn (gen_isync ());
11757 emit_label (XEXP (label2
, 0));
11760 /* Expand an atomic test and set operation. MEM is the memory on which
11761 to operate. VAL is the value set. SCRATCH is a scratch GPR. */
11764 rs6000_split_lock_test_and_set (rtx retval
, rtx mem
, rtx val
, rtx scratch
)
11766 enum machine_mode mode
= GET_MODE (mem
);
11767 rtx label
, x
, cond
= gen_rtx_REG (CCmode
, CR0_REGNO
);
11769 emit_insn (gen_memory_barrier ());
11771 label
= gen_rtx_LABEL_REF (VOIDmode
, gen_label_rtx ());
11772 emit_label (XEXP (label
, 0));
11774 emit_load_locked (mode
, retval
, mem
);
11775 emit_move_insn (scratch
, val
);
11776 emit_store_conditional (mode
, cond
, mem
, scratch
);
11778 x
= gen_rtx_NE (VOIDmode
, cond
, const0_rtx
);
11779 emit_unlikely_jump (x
, label
);
11781 emit_insn (gen_isync ());
11784 /* Emit instructions to move SRC to DST. Called by splitters for
11785 multi-register moves. It will emit at most one instruction for
11786 each register that is accessed; that is, it won't emit li/lis pairs
11787 (or equivalent for 64-bit code). One of SRC or DST must be a hard
11791 rs6000_split_multireg_move (rtx dst
, rtx src
)
11793 /* The register number of the first register being moved. */
11795 /* The mode that is to be moved. */
11796 enum machine_mode mode
;
11797 /* The mode that the move is being done in, and its size. */
11798 enum machine_mode reg_mode
;
11800 /* The number of registers that will be moved. */
11803 reg
= REG_P (dst
) ? REGNO (dst
) : REGNO (src
);
11804 mode
= GET_MODE (dst
);
11805 nregs
= hard_regno_nregs
[reg
][mode
];
11806 if (FP_REGNO_P (reg
))
11808 else if (ALTIVEC_REGNO_P (reg
))
11809 reg_mode
= V16QImode
;
11811 reg_mode
= word_mode
;
11812 reg_mode_size
= GET_MODE_SIZE (reg_mode
);
11814 gcc_assert (reg_mode_size
* nregs
== GET_MODE_SIZE (mode
));
11816 if (REG_P (src
) && REG_P (dst
) && (REGNO (src
) < REGNO (dst
)))
11818 /* Move register range backwards, if we might have destructive
11821 for (i
= nregs
- 1; i
>= 0; i
--)
11822 emit_insn (gen_rtx_SET (VOIDmode
,
11823 simplify_gen_subreg (reg_mode
, dst
, mode
,
11824 i
* reg_mode_size
),
11825 simplify_gen_subreg (reg_mode
, src
, mode
,
11826 i
* reg_mode_size
)));
11832 bool used_update
= false;
11834 if (MEM_P (src
) && INT_REGNO_P (reg
))
11838 if (GET_CODE (XEXP (src
, 0)) == PRE_INC
11839 || GET_CODE (XEXP (src
, 0)) == PRE_DEC
)
11842 breg
= XEXP (XEXP (src
, 0), 0);
11843 delta_rtx
= (GET_CODE (XEXP (src
, 0)) == PRE_INC
11844 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src
)))
11845 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src
))));
11846 emit_insn (TARGET_32BIT
11847 ? gen_addsi3 (breg
, breg
, delta_rtx
)
11848 : gen_adddi3 (breg
, breg
, delta_rtx
));
11849 src
= gen_rtx_MEM (mode
, breg
);
11851 else if (! offsettable_memref_p (src
))
11853 rtx newsrc
, basereg
;
11854 basereg
= gen_rtx_REG (Pmode
, reg
);
11855 emit_insn (gen_rtx_SET (VOIDmode
, basereg
, XEXP (src
, 0)));
11856 newsrc
= gen_rtx_MEM (GET_MODE (src
), basereg
);
11857 MEM_COPY_ATTRIBUTES (newsrc
, src
);
11861 breg
= XEXP (src
, 0);
11862 if (GET_CODE (breg
) == PLUS
|| GET_CODE (breg
) == LO_SUM
)
11863 breg
= XEXP (breg
, 0);
11865 /* If the base register we are using to address memory is
11866 also a destination reg, then change that register last. */
11868 && REGNO (breg
) >= REGNO (dst
)
11869 && REGNO (breg
) < REGNO (dst
) + nregs
)
11870 j
= REGNO (breg
) - REGNO (dst
);
11873 if (GET_CODE (dst
) == MEM
&& INT_REGNO_P (reg
))
11877 if (GET_CODE (XEXP (dst
, 0)) == PRE_INC
11878 || GET_CODE (XEXP (dst
, 0)) == PRE_DEC
)
11881 breg
= XEXP (XEXP (dst
, 0), 0);
11882 delta_rtx
= (GET_CODE (XEXP (dst
, 0)) == PRE_INC
11883 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst
)))
11884 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst
))));
11886 /* We have to update the breg before doing the store.
11887 Use store with update, if available. */
11891 rtx nsrc
= simplify_gen_subreg (reg_mode
, src
, mode
, 0);
11892 emit_insn (TARGET_32BIT
11893 ? (TARGET_POWERPC64
11894 ? gen_movdi_si_update (breg
, breg
, delta_rtx
, nsrc
)
11895 : gen_movsi_update (breg
, breg
, delta_rtx
, nsrc
))
11896 : gen_movdi_di_update (breg
, breg
, delta_rtx
, nsrc
));
11897 used_update
= true;
11900 emit_insn (TARGET_32BIT
11901 ? gen_addsi3 (breg
, breg
, delta_rtx
)
11902 : gen_adddi3 (breg
, breg
, delta_rtx
));
11903 dst
= gen_rtx_MEM (mode
, breg
);
11906 gcc_assert (offsettable_memref_p (dst
));
11909 for (i
= 0; i
< nregs
; i
++)
11911 /* Calculate index to next subword. */
11916 /* If compiler already emitted move of first word by
11917 store with update, no need to do anything. */
11918 if (j
== 0 && used_update
)
11921 emit_insn (gen_rtx_SET (VOIDmode
,
11922 simplify_gen_subreg (reg_mode
, dst
, mode
,
11923 j
* reg_mode_size
),
11924 simplify_gen_subreg (reg_mode
, src
, mode
,
11925 j
* reg_mode_size
)));
11931 /* This page contains routines that are used to determine what the
11932 function prologue and epilogue code will do and write them out. */
11934 /* Return the first fixed-point register that is required to be
11935 saved. 32 if none. */
11938 first_reg_to_save (void)
11942 /* Find lowest numbered live register. */
11943 for (first_reg
= 13; first_reg
<= 31; first_reg
++)
11944 if (regs_ever_live
[first_reg
]
11945 && (! call_used_regs
[first_reg
]
11946 || (first_reg
== RS6000_PIC_OFFSET_TABLE_REGNUM
11947 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
11948 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)
11949 || (TARGET_TOC
&& TARGET_MINIMAL_TOC
)))))
11954 && current_function_uses_pic_offset_table
11955 && first_reg
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
11956 return RS6000_PIC_OFFSET_TABLE_REGNUM
;
11962 /* Similar, for FP regs. */
11965 first_fp_reg_to_save (void)
11969 /* Find lowest numbered live register. */
11970 for (first_reg
= 14 + 32; first_reg
<= 63; first_reg
++)
11971 if (regs_ever_live
[first_reg
])
11977 /* Similar, for AltiVec regs. */
11980 first_altivec_reg_to_save (void)
11984 /* Stack frame remains as is unless we are in AltiVec ABI. */
11985 if (! TARGET_ALTIVEC_ABI
)
11986 return LAST_ALTIVEC_REGNO
+ 1;
11988 /* Find lowest numbered live register. */
11989 for (i
= FIRST_ALTIVEC_REGNO
+ 20; i
<= LAST_ALTIVEC_REGNO
; ++i
)
11990 if (regs_ever_live
[i
])
11996 /* Return a 32-bit mask of the AltiVec registers we need to set in
11997 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
11998 the 32-bit word is 0. */
12000 static unsigned int
12001 compute_vrsave_mask (void)
12003 unsigned int i
, mask
= 0;
12005 /* First, find out if we use _any_ altivec registers. */
12006 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
12007 if (regs_ever_live
[i
])
12008 mask
|= ALTIVEC_REG_BIT (i
);
12013 /* Next, remove the argument registers from the set. These must
12014 be in the VRSAVE mask set by the caller, so we don't need to add
12015 them in again. More importantly, the mask we compute here is
12016 used to generate CLOBBERs in the set_vrsave insn, and we do not
12017 wish the argument registers to die. */
12018 for (i
= cfun
->args_info
.vregno
- 1; i
>= ALTIVEC_ARG_MIN_REG
; --i
)
12019 mask
&= ~ALTIVEC_REG_BIT (i
);
12021 /* Similarly, remove the return value from the set. */
12024 diddle_return_value (is_altivec_return_reg
, &yes
);
12026 mask
&= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN
);
12032 /* For a very restricted set of circumstances, we can cut down the
12033 size of prologues/epilogues by calling our own save/restore-the-world
12037 compute_save_world_info (rs6000_stack_t
*info_ptr
)
12039 info_ptr
->world_save_p
= 1;
12040 info_ptr
->world_save_p
12041 = (WORLD_SAVE_P (info_ptr
)
12042 && DEFAULT_ABI
== ABI_DARWIN
12043 && ! (current_function_calls_setjmp
&& flag_exceptions
)
12044 && info_ptr
->first_fp_reg_save
== FIRST_SAVED_FP_REGNO
12045 && info_ptr
->first_gp_reg_save
== FIRST_SAVED_GP_REGNO
12046 && info_ptr
->first_altivec_reg_save
== FIRST_SAVED_ALTIVEC_REGNO
12047 && info_ptr
->cr_save_p
);
12049 /* This will not work in conjunction with sibcalls. Make sure there
12050 are none. (This check is expensive, but seldom executed.) */
12051 if (WORLD_SAVE_P (info_ptr
))
12054 for ( insn
= get_last_insn_anywhere (); insn
; insn
= PREV_INSN (insn
))
12055 if ( GET_CODE (insn
) == CALL_INSN
12056 && SIBLING_CALL_P (insn
))
12058 info_ptr
->world_save_p
= 0;
12063 if (WORLD_SAVE_P (info_ptr
))
12065 /* Even if we're not touching VRsave, make sure there's room on the
12066 stack for it, if it looks like we're calling SAVE_WORLD, which
12067 will attempt to save it. */
12068 info_ptr
->vrsave_size
= 4;
12070 /* "Save" the VRsave register too if we're saving the world. */
12071 if (info_ptr
->vrsave_mask
== 0)
12072 info_ptr
->vrsave_mask
= compute_vrsave_mask ();
12074 /* Because the Darwin register save/restore routines only handle
12075 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
12077 gcc_assert (info_ptr
->first_fp_reg_save
>= FIRST_SAVED_FP_REGNO
12078 && (info_ptr
->first_altivec_reg_save
12079 >= FIRST_SAVED_ALTIVEC_REGNO
));
12086 is_altivec_return_reg (rtx reg
, void *xyes
)
12088 bool *yes
= (bool *) xyes
;
12089 if (REGNO (reg
) == ALTIVEC_ARG_RETURN
)
12094 /* Calculate the stack information for the current function. This is
12095 complicated by having two separate calling sequences, the AIX calling
12096 sequence and the V.4 calling sequence.
12098 AIX (and Darwin/Mac OS X) stack frames look like:
12100 SP----> +---------------------------------------+
12101 | back chain to caller | 0 0
12102 +---------------------------------------+
12103 | saved CR | 4 8 (8-11)
12104 +---------------------------------------+
12106 +---------------------------------------+
12107 | reserved for compilers | 12 24
12108 +---------------------------------------+
12109 | reserved for binders | 16 32
12110 +---------------------------------------+
12111 | saved TOC pointer | 20 40
12112 +---------------------------------------+
12113 | Parameter save area (P) | 24 48
12114 +---------------------------------------+
12115 | Alloca space (A) | 24+P etc.
12116 +---------------------------------------+
12117 | Local variable space (L) | 24+P+A
12118 +---------------------------------------+
12119 | Float/int conversion temporary (X) | 24+P+A+L
12120 +---------------------------------------+
12121 | Save area for AltiVec registers (W) | 24+P+A+L+X
12122 +---------------------------------------+
12123 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
12124 +---------------------------------------+
12125 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
12126 +---------------------------------------+
12127 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
12128 +---------------------------------------+
12129 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
12130 +---------------------------------------+
12131 old SP->| back chain to caller's caller |
12132 +---------------------------------------+
12134 The required alignment for AIX configurations is two words (i.e., 8
12138 V.4 stack frames look like:
12140 SP----> +---------------------------------------+
12141 | back chain to caller | 0
12142 +---------------------------------------+
12143 | caller's saved LR | 4
12144 +---------------------------------------+
12145 | Parameter save area (P) | 8
12146 +---------------------------------------+
12147 | Alloca space (A) | 8+P
12148 +---------------------------------------+
12149 | Varargs save area (V) | 8+P+A
12150 +---------------------------------------+
12151 | Local variable space (L) | 8+P+A+V
12152 +---------------------------------------+
12153 | Float/int conversion temporary (X) | 8+P+A+V+L
12154 +---------------------------------------+
12155 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
12156 +---------------------------------------+
12157 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
12158 +---------------------------------------+
12159 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
12160 +---------------------------------------+
12161 | SPE: area for 64-bit GP registers |
12162 +---------------------------------------+
12163 | SPE alignment padding |
12164 +---------------------------------------+
12165 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
12166 +---------------------------------------+
12167 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
12168 +---------------------------------------+
12169 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
12170 +---------------------------------------+
12171 old SP->| back chain to caller's caller |
12172 +---------------------------------------+
12174 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
12175 given. (But note below and in sysv4.h that we require only 8 and
12176 may round up the size of our stack frame anyways. The historical
12177 reason is early versions of powerpc-linux which didn't properly
12178 align the stack at program startup. A happy side-effect is that
12179 -mno-eabi libraries can be used with -meabi programs.)
12181 The EABI configuration defaults to the V.4 layout. However,
12182 the stack alignment requirements may differ. If -mno-eabi is not
12183 given, the required stack alignment is 8 bytes; if -mno-eabi is
12184 given, the required alignment is 16 bytes. (But see V.4 comment
12187 #ifndef ABI_STACK_BOUNDARY
12188 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
12191 static rs6000_stack_t
*
12192 rs6000_stack_info (void)
12194 static rs6000_stack_t info
, zero_info
;
12195 rs6000_stack_t
*info_ptr
= &info
;
12196 int reg_size
= TARGET_32BIT
? 4 : 8;
12199 HOST_WIDE_INT non_fixed_size
;
12201 /* Zero all fields portably. */
12206 /* Cache value so we don't rescan instruction chain over and over. */
12207 if (cfun
->machine
->insn_chain_scanned_p
== 0)
12208 cfun
->machine
->insn_chain_scanned_p
12209 = spe_func_has_64bit_regs_p () + 1;
12210 info_ptr
->spe_64bit_regs_used
= cfun
->machine
->insn_chain_scanned_p
- 1;
12213 /* Select which calling sequence. */
12214 info_ptr
->abi
= DEFAULT_ABI
;
12216 /* Calculate which registers need to be saved & save area size. */
12217 info_ptr
->first_gp_reg_save
= first_reg_to_save ();
12218 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
12219 even if it currently looks like we won't. */
12220 if (((TARGET_TOC
&& TARGET_MINIMAL_TOC
)
12221 || (flag_pic
== 1 && DEFAULT_ABI
== ABI_V4
)
12222 || (flag_pic
&& DEFAULT_ABI
== ABI_DARWIN
))
12223 && info_ptr
->first_gp_reg_save
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
12224 info_ptr
->gp_size
= reg_size
* (32 - RS6000_PIC_OFFSET_TABLE_REGNUM
);
12226 info_ptr
->gp_size
= reg_size
* (32 - info_ptr
->first_gp_reg_save
);
12228 /* For the SPE, we have an additional upper 32-bits on each GPR.
12229 Ideally we should save the entire 64-bits only when the upper
12230 half is used in SIMD instructions. Since we only record
12231 registers live (not the size they are used in), this proves
12232 difficult because we'd have to traverse the instruction chain at
12233 the right time, taking reload into account. This is a real pain,
12234 so we opt to save the GPRs in 64-bits always if but one register
12235 gets used in 64-bits. Otherwise, all the registers in the frame
12236 get saved in 32-bits.
12238 So... since when we save all GPRs (except the SP) in 64-bits, the
12239 traditional GP save area will be empty. */
12240 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
12241 info_ptr
->gp_size
= 0;
12243 info_ptr
->first_fp_reg_save
= first_fp_reg_to_save ();
12244 info_ptr
->fp_size
= 8 * (64 - info_ptr
->first_fp_reg_save
);
12246 info_ptr
->first_altivec_reg_save
= first_altivec_reg_to_save ();
12247 info_ptr
->altivec_size
= 16 * (LAST_ALTIVEC_REGNO
+ 1
12248 - info_ptr
->first_altivec_reg_save
);
12250 /* Does this function call anything? */
12251 info_ptr
->calls_p
= (! current_function_is_leaf
12252 || cfun
->machine
->ra_needs_full_frame
);
12254 /* Determine if we need to save the link register. */
12255 if (rs6000_ra_ever_killed ()
12256 || (DEFAULT_ABI
== ABI_AIX
12257 && current_function_profile
12258 && !TARGET_PROFILE_KERNEL
)
12259 #ifdef TARGET_RELOCATABLE
12260 || (TARGET_RELOCATABLE
&& (get_pool_size () != 0))
12262 || (info_ptr
->first_fp_reg_save
!= 64
12263 && !FP_SAVE_INLINE (info_ptr
->first_fp_reg_save
))
12264 || info_ptr
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
12265 || (DEFAULT_ABI
== ABI_V4
&& current_function_calls_alloca
)
12266 || info_ptr
->calls_p
)
12268 info_ptr
->lr_save_p
= 1;
12269 regs_ever_live
[LINK_REGISTER_REGNUM
] = 1;
12272 /* Determine if we need to save the condition code registers. */
12273 if (regs_ever_live
[CR2_REGNO
]
12274 || regs_ever_live
[CR3_REGNO
]
12275 || regs_ever_live
[CR4_REGNO
])
12277 info_ptr
->cr_save_p
= 1;
12278 if (DEFAULT_ABI
== ABI_V4
)
12279 info_ptr
->cr_size
= reg_size
;
12282 /* If the current function calls __builtin_eh_return, then we need
12283 to allocate stack space for registers that will hold data for
12284 the exception handler. */
12285 if (current_function_calls_eh_return
)
12288 for (i
= 0; EH_RETURN_DATA_REGNO (i
) != INVALID_REGNUM
; ++i
)
12291 /* SPE saves EH registers in 64-bits. */
12292 ehrd_size
= i
* (TARGET_SPE_ABI
12293 && info_ptr
->spe_64bit_regs_used
!= 0
12294 ? UNITS_PER_SPE_WORD
: UNITS_PER_WORD
);
12299 /* Determine various sizes. */
12300 info_ptr
->reg_size
= reg_size
;
12301 info_ptr
->fixed_size
= RS6000_SAVE_AREA
;
12302 info_ptr
->vars_size
= RS6000_ALIGN (get_frame_size (), 8);
12303 info_ptr
->parm_size
= RS6000_ALIGN (current_function_outgoing_args_size
,
12304 TARGET_ALTIVEC
? 16 : 8);
12305 if (FRAME_GROWS_DOWNWARD
)
12306 info_ptr
->vars_size
12307 += RS6000_ALIGN (info_ptr
->fixed_size
+ info_ptr
->vars_size
12308 + info_ptr
->parm_size
,
12309 ABI_STACK_BOUNDARY
/ BITS_PER_UNIT
)
12310 - (info_ptr
->fixed_size
+ info_ptr
->vars_size
12311 + info_ptr
->parm_size
);
12313 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
12314 info_ptr
->spe_gp_size
= 8 * (32 - info_ptr
->first_gp_reg_save
);
12316 info_ptr
->spe_gp_size
= 0;
12318 if (TARGET_ALTIVEC_ABI
)
12319 info_ptr
->vrsave_mask
= compute_vrsave_mask ();
12321 info_ptr
->vrsave_mask
= 0;
12323 if (TARGET_ALTIVEC_VRSAVE
&& info_ptr
->vrsave_mask
)
12324 info_ptr
->vrsave_size
= 4;
12326 info_ptr
->vrsave_size
= 0;
12328 compute_save_world_info (info_ptr
);
12330 /* Calculate the offsets. */
12331 switch (DEFAULT_ABI
)
12335 gcc_unreachable ();
12339 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
12340 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
12342 if (TARGET_ALTIVEC_ABI
)
12344 info_ptr
->vrsave_save_offset
12345 = info_ptr
->gp_save_offset
- info_ptr
->vrsave_size
;
12347 /* Align stack so vector save area is on a quadword boundary. */
12348 if (info_ptr
->altivec_size
!= 0)
12349 info_ptr
->altivec_padding_size
12350 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
12352 info_ptr
->altivec_padding_size
= 0;
12354 info_ptr
->altivec_save_offset
12355 = info_ptr
->vrsave_save_offset
12356 - info_ptr
->altivec_padding_size
12357 - info_ptr
->altivec_size
;
12359 /* Adjust for AltiVec case. */
12360 info_ptr
->ehrd_offset
= info_ptr
->altivec_save_offset
- ehrd_size
;
12363 info_ptr
->ehrd_offset
= info_ptr
->gp_save_offset
- ehrd_size
;
12364 info_ptr
->cr_save_offset
= reg_size
; /* first word when 64-bit. */
12365 info_ptr
->lr_save_offset
= 2*reg_size
;
12369 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
12370 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
12371 info_ptr
->cr_save_offset
= info_ptr
->gp_save_offset
- info_ptr
->cr_size
;
12373 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
12375 /* Align stack so SPE GPR save area is aligned on a
12376 double-word boundary. */
12377 if (info_ptr
->spe_gp_size
!= 0)
12378 info_ptr
->spe_padding_size
12379 = 8 - (-info_ptr
->cr_save_offset
% 8);
12381 info_ptr
->spe_padding_size
= 0;
12383 info_ptr
->spe_gp_save_offset
12384 = info_ptr
->cr_save_offset
12385 - info_ptr
->spe_padding_size
12386 - info_ptr
->spe_gp_size
;
12388 /* Adjust for SPE case. */
12389 info_ptr
->toc_save_offset
12390 = info_ptr
->spe_gp_save_offset
- info_ptr
->toc_size
;
12392 else if (TARGET_ALTIVEC_ABI
)
12394 info_ptr
->vrsave_save_offset
12395 = info_ptr
->cr_save_offset
- info_ptr
->vrsave_size
;
12397 /* Align stack so vector save area is on a quadword boundary. */
12398 if (info_ptr
->altivec_size
!= 0)
12399 info_ptr
->altivec_padding_size
12400 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
12402 info_ptr
->altivec_padding_size
= 0;
12404 info_ptr
->altivec_save_offset
12405 = info_ptr
->vrsave_save_offset
12406 - info_ptr
->altivec_padding_size
12407 - info_ptr
->altivec_size
;
12409 /* Adjust for AltiVec case. */
12410 info_ptr
->toc_save_offset
12411 = info_ptr
->altivec_save_offset
- info_ptr
->toc_size
;
12414 info_ptr
->toc_save_offset
= info_ptr
->cr_save_offset
- info_ptr
->toc_size
;
12415 info_ptr
->ehrd_offset
= info_ptr
->toc_save_offset
- ehrd_size
;
12416 info_ptr
->lr_save_offset
= reg_size
;
12420 save_align
= (TARGET_ALTIVEC_ABI
|| DEFAULT_ABI
== ABI_DARWIN
) ? 16 : 8;
12421 info_ptr
->save_size
= RS6000_ALIGN (info_ptr
->fp_size
12422 + info_ptr
->gp_size
12423 + info_ptr
->altivec_size
12424 + info_ptr
->altivec_padding_size
12425 + info_ptr
->spe_gp_size
12426 + info_ptr
->spe_padding_size
12428 + info_ptr
->cr_size
12429 + info_ptr
->lr_size
12430 + info_ptr
->vrsave_size
12431 + info_ptr
->toc_size
,
12434 non_fixed_size
= (info_ptr
->vars_size
12435 + info_ptr
->parm_size
12436 + info_ptr
->save_size
);
12438 info_ptr
->total_size
= RS6000_ALIGN (non_fixed_size
+ info_ptr
->fixed_size
,
12439 ABI_STACK_BOUNDARY
/ BITS_PER_UNIT
);
12441 /* Determine if we need to allocate any stack frame:
12443 For AIX we need to push the stack if a frame pointer is needed
12444 (because the stack might be dynamically adjusted), if we are
12445 debugging, if we make calls, or if the sum of fp_save, gp_save,
12446 and local variables are more than the space needed to save all
12447 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
12448 + 18*8 = 288 (GPR13 reserved).
12450 For V.4 we don't have the stack cushion that AIX uses, but assume
12451 that the debugger can handle stackless frames. */
12453 if (info_ptr
->calls_p
)
12454 info_ptr
->push_p
= 1;
12456 else if (DEFAULT_ABI
== ABI_V4
)
12457 info_ptr
->push_p
= non_fixed_size
!= 0;
12459 else if (frame_pointer_needed
)
12460 info_ptr
->push_p
= 1;
12462 else if (TARGET_XCOFF
&& write_symbols
!= NO_DEBUG
)
12463 info_ptr
->push_p
= 1;
12466 info_ptr
->push_p
= non_fixed_size
> (TARGET_32BIT
? 220 : 288);
12468 /* Zero offsets if we're not saving those registers. */
12469 if (info_ptr
->fp_size
== 0)
12470 info_ptr
->fp_save_offset
= 0;
12472 if (info_ptr
->gp_size
== 0)
12473 info_ptr
->gp_save_offset
= 0;
12475 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->altivec_size
== 0)
12476 info_ptr
->altivec_save_offset
= 0;
12478 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->vrsave_mask
== 0)
12479 info_ptr
->vrsave_save_offset
= 0;
12481 if (! TARGET_SPE_ABI
12482 || info_ptr
->spe_64bit_regs_used
== 0
12483 || info_ptr
->spe_gp_size
== 0)
12484 info_ptr
->spe_gp_save_offset
= 0;
12486 if (! info_ptr
->lr_save_p
)
12487 info_ptr
->lr_save_offset
= 0;
12489 if (! info_ptr
->cr_save_p
)
12490 info_ptr
->cr_save_offset
= 0;
12492 if (! info_ptr
->toc_save_p
)
12493 info_ptr
->toc_save_offset
= 0;
12498 /* Return true if the current function uses any GPRs in 64-bit SIMD
12502 spe_func_has_64bit_regs_p (void)
12506 /* Functions that save and restore all the call-saved registers will
12507 need to save/restore the registers in 64-bits. */
12508 if (current_function_calls_eh_return
12509 || current_function_calls_setjmp
12510 || current_function_has_nonlocal_goto
)
12513 insns
= get_insns ();
12515 for (insn
= NEXT_INSN (insns
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
12521 /* FIXME: This should be implemented with attributes...
12523 (set_attr "spe64" "true")....then,
12524 if (get_spe64(insn)) return true;
12526 It's the only reliable way to do the stuff below. */
12528 i
= PATTERN (insn
);
12529 if (GET_CODE (i
) == SET
)
12531 enum machine_mode mode
= GET_MODE (SET_SRC (i
));
12533 if (SPE_VECTOR_MODE (mode
))
12535 if (TARGET_E500_DOUBLE
&& mode
== DFmode
)
12545 debug_stack_info (rs6000_stack_t
*info
)
12547 const char *abi_string
;
12550 info
= rs6000_stack_info ();
12552 fprintf (stderr
, "\nStack information for function %s:\n",
12553 ((current_function_decl
&& DECL_NAME (current_function_decl
))
12554 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl
))
12559 default: abi_string
= "Unknown"; break;
12560 case ABI_NONE
: abi_string
= "NONE"; break;
12561 case ABI_AIX
: abi_string
= "AIX"; break;
12562 case ABI_DARWIN
: abi_string
= "Darwin"; break;
12563 case ABI_V4
: abi_string
= "V.4"; break;
12566 fprintf (stderr
, "\tABI = %5s\n", abi_string
);
12568 if (TARGET_ALTIVEC_ABI
)
12569 fprintf (stderr
, "\tALTIVEC ABI extensions enabled.\n");
12571 if (TARGET_SPE_ABI
)
12572 fprintf (stderr
, "\tSPE ABI extensions enabled.\n");
12574 if (info
->first_gp_reg_save
!= 32)
12575 fprintf (stderr
, "\tfirst_gp_reg_save = %5d\n", info
->first_gp_reg_save
);
12577 if (info
->first_fp_reg_save
!= 64)
12578 fprintf (stderr
, "\tfirst_fp_reg_save = %5d\n", info
->first_fp_reg_save
);
12580 if (info
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
)
12581 fprintf (stderr
, "\tfirst_altivec_reg_save = %5d\n",
12582 info
->first_altivec_reg_save
);
12584 if (info
->lr_save_p
)
12585 fprintf (stderr
, "\tlr_save_p = %5d\n", info
->lr_save_p
);
12587 if (info
->cr_save_p
)
12588 fprintf (stderr
, "\tcr_save_p = %5d\n", info
->cr_save_p
);
12590 if (info
->toc_save_p
)
12591 fprintf (stderr
, "\ttoc_save_p = %5d\n", info
->toc_save_p
);
12593 if (info
->vrsave_mask
)
12594 fprintf (stderr
, "\tvrsave_mask = 0x%x\n", info
->vrsave_mask
);
12597 fprintf (stderr
, "\tpush_p = %5d\n", info
->push_p
);
12600 fprintf (stderr
, "\tcalls_p = %5d\n", info
->calls_p
);
12602 if (info
->gp_save_offset
)
12603 fprintf (stderr
, "\tgp_save_offset = %5d\n", info
->gp_save_offset
);
12605 if (info
->fp_save_offset
)
12606 fprintf (stderr
, "\tfp_save_offset = %5d\n", info
->fp_save_offset
);
12608 if (info
->altivec_save_offset
)
12609 fprintf (stderr
, "\taltivec_save_offset = %5d\n",
12610 info
->altivec_save_offset
);
12612 if (info
->spe_gp_save_offset
)
12613 fprintf (stderr
, "\tspe_gp_save_offset = %5d\n",
12614 info
->spe_gp_save_offset
);
12616 if (info
->vrsave_save_offset
)
12617 fprintf (stderr
, "\tvrsave_save_offset = %5d\n",
12618 info
->vrsave_save_offset
);
12620 if (info
->lr_save_offset
)
12621 fprintf (stderr
, "\tlr_save_offset = %5d\n", info
->lr_save_offset
);
12623 if (info
->cr_save_offset
)
12624 fprintf (stderr
, "\tcr_save_offset = %5d\n", info
->cr_save_offset
);
12626 if (info
->toc_save_offset
)
12627 fprintf (stderr
, "\ttoc_save_offset = %5d\n", info
->toc_save_offset
);
12629 if (info
->varargs_save_offset
)
12630 fprintf (stderr
, "\tvarargs_save_offset = %5d\n", info
->varargs_save_offset
);
12632 if (info
->total_size
)
12633 fprintf (stderr
, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC
"\n",
12636 if (info
->vars_size
)
12637 fprintf (stderr
, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC
"\n",
12640 if (info
->parm_size
)
12641 fprintf (stderr
, "\tparm_size = %5d\n", info
->parm_size
);
12643 if (info
->fixed_size
)
12644 fprintf (stderr
, "\tfixed_size = %5d\n", info
->fixed_size
);
12647 fprintf (stderr
, "\tgp_size = %5d\n", info
->gp_size
);
12649 if (info
->spe_gp_size
)
12650 fprintf (stderr
, "\tspe_gp_size = %5d\n", info
->spe_gp_size
);
12653 fprintf (stderr
, "\tfp_size = %5d\n", info
->fp_size
);
12655 if (info
->altivec_size
)
12656 fprintf (stderr
, "\taltivec_size = %5d\n", info
->altivec_size
);
12658 if (info
->vrsave_size
)
12659 fprintf (stderr
, "\tvrsave_size = %5d\n", info
->vrsave_size
);
12661 if (info
->altivec_padding_size
)
12662 fprintf (stderr
, "\taltivec_padding_size= %5d\n",
12663 info
->altivec_padding_size
);
12665 if (info
->spe_padding_size
)
12666 fprintf (stderr
, "\tspe_padding_size = %5d\n",
12667 info
->spe_padding_size
);
12670 fprintf (stderr
, "\tlr_size = %5d\n", info
->lr_size
);
12673 fprintf (stderr
, "\tcr_size = %5d\n", info
->cr_size
);
12675 if (info
->toc_size
)
12676 fprintf (stderr
, "\ttoc_size = %5d\n", info
->toc_size
);
12678 if (info
->save_size
)
12679 fprintf (stderr
, "\tsave_size = %5d\n", info
->save_size
);
12681 if (info
->reg_size
!= 4)
12682 fprintf (stderr
, "\treg_size = %5d\n", info
->reg_size
);
12684 fprintf (stderr
, "\n");
12688 rs6000_return_addr (int count
, rtx frame
)
12690 /* Currently we don't optimize very well between prolog and body
12691 code and for PIC code the code can be actually quite bad, so
12692 don't try to be too clever here. */
12693 if (count
!= 0 || (DEFAULT_ABI
!= ABI_AIX
&& flag_pic
))
12695 cfun
->machine
->ra_needs_full_frame
= 1;
12702 plus_constant (copy_to_reg
12703 (gen_rtx_MEM (Pmode
,
12704 memory_address (Pmode
, frame
))),
12705 RETURN_ADDRESS_OFFSET
)));
12708 cfun
->machine
->ra_need_lr
= 1;
12709 return get_hard_reg_initial_val (Pmode
, LINK_REGISTER_REGNUM
);
12712 /* Say whether a function is a candidate for sibcall handling or not.
12713 We do not allow indirect calls to be optimized into sibling calls.
12714 Also, we can't do it if there are any vector parameters; there's
12715 nowhere to put the VRsave code so it works; note that functions with
12716 vector parameters are required to have a prototype, so the argument
12717 type info must be available here. (The tail recursion case can work
12718 with vector parameters, but there's no way to distinguish here.) */
12720 rs6000_function_ok_for_sibcall (tree decl
, tree exp ATTRIBUTE_UNUSED
)
12725 if (TARGET_ALTIVEC_VRSAVE
)
12727 for (type
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
12728 type
; type
= TREE_CHAIN (type
))
12730 if (TREE_CODE (TREE_VALUE (type
)) == VECTOR_TYPE
)
12734 if (DEFAULT_ABI
== ABI_DARWIN
12735 || (*targetm
.binds_local_p
) (decl
))
12737 tree attr_list
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
12739 if (!lookup_attribute ("longcall", attr_list
)
12740 || lookup_attribute ("shortcall", attr_list
))
12747 /* NULL if INSN insn is valid within a low-overhead loop.
12748 Otherwise return why doloop cannot be applied.
12749 PowerPC uses the COUNT register for branch on table instructions. */
12751 static const char *
12752 rs6000_invalid_within_doloop (rtx insn
)
12755 return "Function call in the loop.";
12758 && (GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
12759 || GET_CODE (PATTERN (insn
)) == ADDR_VEC
))
12760 return "Computed branch in the loop.";
12766 rs6000_ra_ever_killed (void)
12772 if (current_function_is_thunk
)
12775 /* regs_ever_live has LR marked as used if any sibcalls are present,
12776 but this should not force saving and restoring in the
12777 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
12778 clobbers LR, so that is inappropriate. */
12780 /* Also, the prologue can generate a store into LR that
12781 doesn't really count, like this:
12784 bcl to set PIC register
12788 When we're called from the epilogue, we need to avoid counting
12789 this as a store. */
12791 push_topmost_sequence ();
12792 top
= get_insns ();
12793 pop_topmost_sequence ();
12794 reg
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
12796 for (insn
= NEXT_INSN (top
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
12800 if (FIND_REG_INC_NOTE (insn
, reg
))
12802 else if (GET_CODE (insn
) == CALL_INSN
12803 && !SIBLING_CALL_P (insn
))
12805 else if (set_of (reg
, insn
) != NULL_RTX
12806 && !prologue_epilogue_contains (insn
))
12813 /* Add a REG_MAYBE_DEAD note to the insn. */
12815 rs6000_maybe_dead (rtx insn
)
12817 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
,
12822 /* Emit instructions needed to load the TOC register.
12823 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
12824 a constant pool; or for SVR4 -fpic. */
12827 rs6000_emit_load_toc_table (int fromprolog
)
12830 dest
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
12832 if (TARGET_ELF
&& TARGET_SECURE_PLT
&& DEFAULT_ABI
!= ABI_AIX
&& flag_pic
)
12835 rtx lab
, tmp1
, tmp2
, got
, tempLR
;
12837 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
12838 lab
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
12840 got
= gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
);
12842 got
= rs6000_got_sym ();
12843 tmp1
= tmp2
= dest
;
12846 tmp1
= gen_reg_rtx (Pmode
);
12847 tmp2
= gen_reg_rtx (Pmode
);
12849 tempLR
= (fromprolog
12850 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
12851 : gen_reg_rtx (Pmode
));
12852 insn
= emit_insn (gen_load_toc_v4_PIC_1 (tempLR
, lab
));
12854 rs6000_maybe_dead (insn
);
12855 insn
= emit_move_insn (tmp1
, tempLR
);
12857 rs6000_maybe_dead (insn
);
12858 insn
= emit_insn (gen_load_toc_v4_PIC_3b (tmp2
, tmp1
, got
, lab
));
12860 rs6000_maybe_dead (insn
);
12861 insn
= emit_insn (gen_load_toc_v4_PIC_3c (dest
, tmp2
, got
, lab
));
12863 rs6000_maybe_dead (insn
);
12865 else if (TARGET_ELF
&& DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
12867 rtx tempLR
= (fromprolog
12868 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
12869 : gen_reg_rtx (Pmode
));
12871 insn
= emit_insn (gen_load_toc_v4_pic_si (tempLR
));
12873 rs6000_maybe_dead (insn
);
12874 insn
= emit_move_insn (dest
, tempLR
);
12876 rs6000_maybe_dead (insn
);
12878 else if (TARGET_ELF
&& DEFAULT_ABI
!= ABI_AIX
&& flag_pic
== 2)
12881 rtx tempLR
= (fromprolog
12882 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
12883 : gen_reg_rtx (Pmode
));
12884 rtx temp0
= (fromprolog
12885 ? gen_rtx_REG (Pmode
, 0)
12886 : gen_reg_rtx (Pmode
));
12892 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
12893 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
12895 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCL", rs6000_pic_labelno
);
12896 symL
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
12898 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR
,
12900 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
12901 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0
, dest
,
12909 tocsym
= gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
);
12910 emit_insn (gen_load_toc_v4_PIC_1b (tempLR
, tocsym
));
12911 emit_move_insn (dest
, tempLR
);
12912 emit_move_insn (temp0
, gen_rtx_MEM (Pmode
, dest
));
12914 insn
= emit_insn (gen_addsi3 (dest
, temp0
, dest
));
12916 rs6000_maybe_dead (insn
);
12918 else if (TARGET_ELF
&& !TARGET_AIX
&& flag_pic
== 0 && TARGET_MINIMAL_TOC
)
12920 /* This is for AIX code running in non-PIC ELF32. */
12923 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCTOC", 1);
12924 realsym
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
12926 insn
= emit_insn (gen_elf_high (dest
, realsym
));
12928 rs6000_maybe_dead (insn
);
12929 insn
= emit_insn (gen_elf_low (dest
, dest
, realsym
));
12931 rs6000_maybe_dead (insn
);
12935 gcc_assert (DEFAULT_ABI
== ABI_AIX
);
12938 insn
= emit_insn (gen_load_toc_aix_si (dest
));
12940 insn
= emit_insn (gen_load_toc_aix_di (dest
));
12942 rs6000_maybe_dead (insn
);
12946 /* Emit instructions to restore the link register after determining where
12947 its value has been stored. */
12950 rs6000_emit_eh_reg_restore (rtx source
, rtx scratch
)
12952 rs6000_stack_t
*info
= rs6000_stack_info ();
12955 operands
[0] = source
;
12956 operands
[1] = scratch
;
12958 if (info
->lr_save_p
)
12960 rtx frame_rtx
= stack_pointer_rtx
;
12961 HOST_WIDE_INT sp_offset
= 0;
12964 if (frame_pointer_needed
12965 || current_function_calls_alloca
12966 || info
->total_size
> 32767)
12968 emit_move_insn (operands
[1], gen_rtx_MEM (Pmode
, frame_rtx
));
12969 frame_rtx
= operands
[1];
12971 else if (info
->push_p
)
12972 sp_offset
= info
->total_size
;
12974 tmp
= plus_constant (frame_rtx
, info
->lr_save_offset
+ sp_offset
);
12975 tmp
= gen_rtx_MEM (Pmode
, tmp
);
12976 emit_move_insn (tmp
, operands
[0]);
12979 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
), operands
[0]);
12982 static GTY(()) int set
= -1;
12985 get_TOC_alias_set (void)
12988 set
= new_alias_set ();
/* This returns nonzero if the current function uses the TOC.  This is
   determined by the presence of (use (unspec ... UNSPEC_TOC)), which
   is generated by the ABI_V4 load_toc_* patterns.  */

#if TARGET_ELF
static int
uses_TOC (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	rtx pat = PATTERN (insn);
	int i;

	if (GET_CODE (pat) == PARALLEL)
	  for (i = 0; i < XVECLEN (pat, 0); i++)
	    {
	      rtx sub = XVECEXP (pat, 0, i);
	      if (GET_CODE (sub) == USE)
		{
		  sub = XEXP (sub, 0);
		  if (GET_CODE (sub) == UNSPEC
		      && XINT (sub, 1) == UNSPEC_TOC)
		    return 1;
		}
	    }
      }
  return 0;
}
#endif
13025 create_TOC_reference (rtx symbol
)
13027 return gen_rtx_PLUS (Pmode
,
13028 gen_rtx_REG (Pmode
, TOC_REGISTER
),
13029 gen_rtx_CONST (Pmode
,
13030 gen_rtx_MINUS (Pmode
, symbol
,
13031 gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
))));
13034 /* If _Unwind_* has been called from within the same module,
13035 toc register is not guaranteed to be saved to 40(1) on function
13036 entry. Save it there in that case. */
13039 rs6000_aix_emit_builtin_unwind_init (void)
13042 rtx stack_top
= gen_reg_rtx (Pmode
);
13043 rtx opcode_addr
= gen_reg_rtx (Pmode
);
13044 rtx opcode
= gen_reg_rtx (SImode
);
13045 rtx tocompare
= gen_reg_rtx (SImode
);
13046 rtx no_toc_save_needed
= gen_label_rtx ();
13048 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
13049 emit_move_insn (stack_top
, mem
);
13051 mem
= gen_rtx_MEM (Pmode
,
13052 gen_rtx_PLUS (Pmode
, stack_top
,
13053 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
13054 emit_move_insn (opcode_addr
, mem
);
13055 emit_move_insn (opcode
, gen_rtx_MEM (SImode
, opcode_addr
));
13056 emit_move_insn (tocompare
, gen_int_mode (TARGET_32BIT
? 0x80410014
13057 : 0xE8410028, SImode
));
13059 do_compare_rtx_and_jump (opcode
, tocompare
, EQ
, 1,
13060 SImode
, NULL_RTX
, NULL_RTX
,
13061 no_toc_save_needed
);
13063 mem
= gen_rtx_MEM (Pmode
,
13064 gen_rtx_PLUS (Pmode
, stack_top
,
13065 GEN_INT (5 * GET_MODE_SIZE (Pmode
))));
13066 emit_move_insn (mem
, gen_rtx_REG (Pmode
, 2));
13067 emit_label (no_toc_save_needed
);
13070 /* This ties together stack memory (MEM with an alias set of
13071 rs6000_sr_alias_set) and the change to the stack pointer. */
13074 rs6000_emit_stack_tie (void)
13076 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
));
13078 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13079 emit_insn (gen_stack_tie (mem
));
13082 /* Emit the correct code for allocating stack space, as insns.
13083 If COPY_R12, make sure a copy of the old frame is left in r12.
13084 The generated code may use hard register 0 as a temporary. */
13087 rs6000_emit_allocate_stack (HOST_WIDE_INT size
, int copy_r12
)
13090 rtx stack_reg
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
13091 rtx tmp_reg
= gen_rtx_REG (Pmode
, 0);
13092 rtx todec
= gen_int_mode (-size
, Pmode
);
13094 if (INTVAL (todec
) != -size
)
13096 warning (0, "stack frame too large");
13097 emit_insn (gen_trap ());
13101 if (current_function_limit_stack
)
13103 if (REG_P (stack_limit_rtx
)
13104 && REGNO (stack_limit_rtx
) > 1
13105 && REGNO (stack_limit_rtx
) <= 31)
13107 emit_insn (TARGET_32BIT
13108 ? gen_addsi3 (tmp_reg
,
13111 : gen_adddi3 (tmp_reg
,
13115 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
13118 else if (GET_CODE (stack_limit_rtx
) == SYMBOL_REF
13120 && DEFAULT_ABI
== ABI_V4
)
13122 rtx toload
= gen_rtx_CONST (VOIDmode
,
13123 gen_rtx_PLUS (Pmode
,
13127 emit_insn (gen_elf_high (tmp_reg
, toload
));
13128 emit_insn (gen_elf_low (tmp_reg
, tmp_reg
, toload
));
13129 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
13133 warning (0, "stack limit expression is not supported");
13136 if (copy_r12
|| ! TARGET_UPDATE
)
13137 emit_move_insn (gen_rtx_REG (Pmode
, 12), stack_reg
);
13143 /* Need a note here so that try_split doesn't get confused. */
13144 if (get_last_insn () == NULL_RTX
)
13145 emit_note (NOTE_INSN_DELETED
);
13146 insn
= emit_move_insn (tmp_reg
, todec
);
13147 try_split (PATTERN (insn
), insn
, 0);
13151 insn
= emit_insn (TARGET_32BIT
13152 ? gen_movsi_update (stack_reg
, stack_reg
,
13154 : gen_movdi_di_update (stack_reg
, stack_reg
,
13155 todec
, stack_reg
));
13159 insn
= emit_insn (TARGET_32BIT
13160 ? gen_addsi3 (stack_reg
, stack_reg
, todec
)
13161 : gen_adddi3 (stack_reg
, stack_reg
, todec
));
13162 emit_move_insn (gen_rtx_MEM (Pmode
, stack_reg
),
13163 gen_rtx_REG (Pmode
, 12));
13166 RTX_FRAME_RELATED_P (insn
) = 1;
13168 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
13169 gen_rtx_SET (VOIDmode
, stack_reg
,
13170 gen_rtx_PLUS (Pmode
, stack_reg
,
13175 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
13176 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
13177 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
13178 deduce these equivalences by itself so it wasn't necessary to hold
13179 its hand so much. */
13182 rs6000_frame_related (rtx insn
, rtx reg
, HOST_WIDE_INT val
,
13183 rtx reg2
, rtx rreg
)
13187 /* copy_rtx will not make unique copies of registers, so we need to
13188 ensure we don't have unwanted sharing here. */
13190 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
13193 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
13195 real
= copy_rtx (PATTERN (insn
));
13197 if (reg2
!= NULL_RTX
)
13198 real
= replace_rtx (real
, reg2
, rreg
);
13200 real
= replace_rtx (real
, reg
,
13201 gen_rtx_PLUS (Pmode
, gen_rtx_REG (Pmode
,
13202 STACK_POINTER_REGNUM
),
13205 /* We expect that 'real' is either a SET or a PARALLEL containing
13206 SETs (and possibly other stuff). In a PARALLEL, all the SETs
13207 are important so they all have to be marked RTX_FRAME_RELATED_P. */
13209 if (GET_CODE (real
) == SET
)
13213 temp
= simplify_rtx (SET_SRC (set
));
13215 SET_SRC (set
) = temp
;
13216 temp
= simplify_rtx (SET_DEST (set
));
13218 SET_DEST (set
) = temp
;
13219 if (GET_CODE (SET_DEST (set
)) == MEM
)
13221 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
13223 XEXP (SET_DEST (set
), 0) = temp
;
13230 gcc_assert (GET_CODE (real
) == PARALLEL
);
13231 for (i
= 0; i
< XVECLEN (real
, 0); i
++)
13232 if (GET_CODE (XVECEXP (real
, 0, i
)) == SET
)
13234 rtx set
= XVECEXP (real
, 0, i
);
13236 temp
= simplify_rtx (SET_SRC (set
));
13238 SET_SRC (set
) = temp
;
13239 temp
= simplify_rtx (SET_DEST (set
));
13241 SET_DEST (set
) = temp
;
13242 if (GET_CODE (SET_DEST (set
)) == MEM
)
13244 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
13246 XEXP (SET_DEST (set
), 0) = temp
;
13248 RTX_FRAME_RELATED_P (set
) = 1;
13253 real
= spe_synthesize_frame_save (real
);
13255 RTX_FRAME_RELATED_P (insn
) = 1;
13256 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
13261 /* Given an SPE frame note, return a PARALLEL of SETs with the
13262 original note, plus a synthetic register save. */
13265 spe_synthesize_frame_save (rtx real
)
13267 rtx synth
, offset
, reg
, real2
;
13269 if (GET_CODE (real
) != SET
13270 || GET_MODE (SET_SRC (real
)) != V2SImode
)
13273 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
13274 frame related note. The parallel contains a set of the register
13275 being saved, and another set to a synthetic register (n+1200).
13276 This is so we can differentiate between 64-bit and 32-bit saves.
13277 Words cannot describe this nastiness. */
13279 gcc_assert (GET_CODE (SET_DEST (real
)) == MEM
13280 && GET_CODE (XEXP (SET_DEST (real
), 0)) == PLUS
13281 && GET_CODE (SET_SRC (real
)) == REG
);
13284 (set (mem (plus (reg x) (const y)))
13287 (set (mem (plus (reg x) (const y+4)))
13291 real2
= copy_rtx (real
);
13292 PUT_MODE (SET_DEST (real2
), SImode
);
13293 reg
= SET_SRC (real2
);
13294 real2
= replace_rtx (real2
, reg
, gen_rtx_REG (SImode
, REGNO (reg
)));
13295 synth
= copy_rtx (real2
);
13297 if (BYTES_BIG_ENDIAN
)
13299 offset
= XEXP (XEXP (SET_DEST (real2
), 0), 1);
13300 real2
= replace_rtx (real2
, offset
, GEN_INT (INTVAL (offset
) + 4));
13303 reg
= SET_SRC (synth
);
13305 synth
= replace_rtx (synth
, reg
,
13306 gen_rtx_REG (SImode
, REGNO (reg
) + 1200));
13308 offset
= XEXP (XEXP (SET_DEST (synth
), 0), 1);
13309 synth
= replace_rtx (synth
, offset
,
13310 GEN_INT (INTVAL (offset
)
13311 + (BYTES_BIG_ENDIAN
? 0 : 4)));
13313 RTX_FRAME_RELATED_P (synth
) = 1;
13314 RTX_FRAME_RELATED_P (real2
) = 1;
13315 if (BYTES_BIG_ENDIAN
)
13316 real
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, synth
, real2
));
13318 real
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, real2
, synth
));
13323 /* Returns an insn that has a vrsave set operation with the
13324 appropriate CLOBBERs. */
13327 generate_set_vrsave (rtx reg
, rs6000_stack_t
*info
, int epiloguep
)
13330 rtx insn
, clobs
[TOTAL_ALTIVEC_REGS
+ 1];
13331 rtx vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
13334 = gen_rtx_SET (VOIDmode
,
13336 gen_rtx_UNSPEC_VOLATILE (SImode
,
13337 gen_rtvec (2, reg
, vrsave
),
13342 /* We need to clobber the registers in the mask so the scheduler
13343 does not move sets to VRSAVE before sets of AltiVec registers.
13345 However, if the function receives nonlocal gotos, reload will set
13346 all call saved registers live. We will end up with:
13348 (set (reg 999) (mem))
13349 (parallel [ (set (reg vrsave) (unspec blah))
13350 (clobber (reg 999))])
13352 The clobber will cause the store into reg 999 to be dead, and
13353 flow will attempt to delete an epilogue insn. In this case, we
13354 need an unspec use/set of the register. */
13356 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
13357 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
13359 if (!epiloguep
|| call_used_regs
[i
])
13360 clobs
[nclobs
++] = gen_rtx_CLOBBER (VOIDmode
,
13361 gen_rtx_REG (V4SImode
, i
));
13364 rtx reg
= gen_rtx_REG (V4SImode
, i
);
13367 = gen_rtx_SET (VOIDmode
,
13369 gen_rtx_UNSPEC (V4SImode
,
13370 gen_rtvec (1, reg
), 27));
13374 insn
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nclobs
));
13376 for (i
= 0; i
< nclobs
; ++i
)
13377 XVECEXP (insn
, 0, i
) = clobs
[i
];
13382 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
13383 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
13386 emit_frame_save (rtx frame_reg
, rtx frame_ptr
, enum machine_mode mode
,
13387 unsigned int regno
, int offset
, HOST_WIDE_INT total_size
)
13389 rtx reg
, offset_rtx
, insn
, mem
, addr
, int_rtx
;
13390 rtx replacea
, replaceb
;
13392 int_rtx
= GEN_INT (offset
);
13394 /* Some cases that need register indexed addressing. */
13395 if ((TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
13396 || (TARGET_E500_DOUBLE
&& mode
== DFmode
)
13398 && SPE_VECTOR_MODE (mode
)
13399 && !SPE_CONST_OFFSET_OK (offset
)))
13401 /* Whomever calls us must make sure r11 is available in the
13402 flow path of instructions in the prologue. */
13403 offset_rtx
= gen_rtx_REG (Pmode
, 11);
13404 emit_move_insn (offset_rtx
, int_rtx
);
13406 replacea
= offset_rtx
;
13407 replaceb
= int_rtx
;
13411 offset_rtx
= int_rtx
;
13412 replacea
= NULL_RTX
;
13413 replaceb
= NULL_RTX
;
13416 reg
= gen_rtx_REG (mode
, regno
);
13417 addr
= gen_rtx_PLUS (Pmode
, frame_reg
, offset_rtx
);
13418 mem
= gen_rtx_MEM (mode
, addr
);
13419 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13421 insn
= emit_move_insn (mem
, reg
);
13423 rs6000_frame_related (insn
, frame_ptr
, total_size
, replacea
, replaceb
);
13426 /* Emit an offset memory reference suitable for a frame store, while
13427 converting to a valid addressing mode. */
13430 gen_frame_mem_offset (enum machine_mode mode
, rtx reg
, int offset
)
13432 rtx int_rtx
, offset_rtx
;
13434 int_rtx
= GEN_INT (offset
);
13436 if ((TARGET_SPE_ABI
&& SPE_VECTOR_MODE (mode
))
13437 || (TARGET_E500_DOUBLE
&& mode
== DFmode
))
13439 offset_rtx
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
13440 emit_move_insn (offset_rtx
, int_rtx
);
13443 offset_rtx
= int_rtx
;
13445 return gen_rtx_MEM (mode
, gen_rtx_PLUS (Pmode
, reg
, offset_rtx
));
13448 /* Look for user-defined global regs. We should not save and restore these,
13449 and cannot use stmw/lmw if there are any in its range. */
13452 no_global_regs_above (int first_greg
)
13455 for (i
= 0; i
< 32 - first_greg
; i
++)
13456 if (global_regs
[first_greg
+ i
])
13461 #ifndef TARGET_FIX_AND_CONTINUE
13462 #define TARGET_FIX_AND_CONTINUE 0
13465 /* Emit function prologue as insns. */
13468 rs6000_emit_prologue (void)
13470 rs6000_stack_t
*info
= rs6000_stack_info ();
13471 enum machine_mode reg_mode
= Pmode
;
13472 int reg_size
= TARGET_32BIT
? 4 : 8;
13473 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
13474 rtx frame_ptr_rtx
= gen_rtx_REG (Pmode
, 12);
13475 rtx frame_reg_rtx
= sp_reg_rtx
;
13476 rtx cr_save_rtx
= NULL_RTX
;
13478 int saving_FPRs_inline
;
13479 int using_store_multiple
;
13480 HOST_WIDE_INT sp_offset
= 0;
13482 if (TARGET_FIX_AND_CONTINUE
)
13484 /* gdb on darwin arranges to forward a function from the old
13485 address by modifying the first 5 instructions of the function
13486 to branch to the overriding function. This is necessary to
13487 permit function pointers that point to the old function to
13488 actually forward to the new function. */
13489 emit_insn (gen_nop ());
13490 emit_insn (gen_nop ());
13491 emit_insn (gen_nop ());
13492 emit_insn (gen_nop ());
13493 emit_insn (gen_nop ());
13496 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
13498 reg_mode
= V2SImode
;
13502 using_store_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
13503 && (!TARGET_SPE_ABI
13504 || info
->spe_64bit_regs_used
== 0)
13505 && info
->first_gp_reg_save
< 31
13506 && no_global_regs_above (info
->first_gp_reg_save
));
13507 saving_FPRs_inline
= (info
->first_fp_reg_save
== 64
13508 || FP_SAVE_INLINE (info
->first_fp_reg_save
)
13509 || current_function_calls_eh_return
13510 || cfun
->machine
->ra_need_lr
);
13512 /* For V.4, update stack before we do any saving and set back pointer. */
13514 && (DEFAULT_ABI
== ABI_V4
13515 || current_function_calls_eh_return
))
13517 if (info
->total_size
< 32767)
13518 sp_offset
= info
->total_size
;
13520 frame_reg_rtx
= frame_ptr_rtx
;
13521 rs6000_emit_allocate_stack (info
->total_size
,
13522 (frame_reg_rtx
!= sp_reg_rtx
13523 && (info
->cr_save_p
13525 || info
->first_fp_reg_save
< 64
13526 || info
->first_gp_reg_save
< 32
13528 if (frame_reg_rtx
!= sp_reg_rtx
)
13529 rs6000_emit_stack_tie ();
13532 /* Handle world saves specially here. */
13533 if (WORLD_SAVE_P (info
))
13539 /* save_world expects lr in r0. */
13540 if (info
->lr_save_p
)
13542 insn
= emit_move_insn (gen_rtx_REG (Pmode
, 0),
13543 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
13544 RTX_FRAME_RELATED_P (insn
) = 1;
13547 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
13548 assumptions about the offsets of various bits of the stack
13550 gcc_assert (info
->gp_save_offset
== -220
13551 && info
->fp_save_offset
== -144
13552 && info
->lr_save_offset
== 8
13553 && info
->cr_save_offset
== 4
13556 && (!current_function_calls_eh_return
13557 || info
->ehrd_offset
== -432)
13558 && info
->vrsave_save_offset
== -224
13559 && info
->altivec_save_offset
== (-224 -16 -192));
13561 treg
= gen_rtx_REG (SImode
, 11);
13562 emit_move_insn (treg
, GEN_INT (-info
->total_size
));
13564 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
13565 in R11. It also clobbers R12, so beware! */
13567 /* Preserve CR2 for save_world prologues */
13569 sz
+= 32 - info
->first_gp_reg_save
;
13570 sz
+= 64 - info
->first_fp_reg_save
;
13571 sz
+= LAST_ALTIVEC_REGNO
- info
->first_altivec_reg_save
+ 1;
13572 p
= rtvec_alloc (sz
);
13574 RTVEC_ELT (p
, j
++) = gen_rtx_CLOBBER (VOIDmode
,
13575 gen_rtx_REG (Pmode
,
13576 LINK_REGISTER_REGNUM
));
13577 RTVEC_ELT (p
, j
++) = gen_rtx_USE (VOIDmode
,
13578 gen_rtx_SYMBOL_REF (Pmode
,
13580 /* We do floats first so that the instruction pattern matches
13582 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
13584 rtx reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
13585 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13586 GEN_INT (info
->fp_save_offset
13587 + sp_offset
+ 8 * i
));
13588 rtx mem
= gen_rtx_MEM (DFmode
, addr
);
13589 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13591 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, mem
, reg
);
13593 for (i
= 0; info
->first_altivec_reg_save
+ i
<= LAST_ALTIVEC_REGNO
; i
++)
13595 rtx reg
= gen_rtx_REG (V4SImode
, info
->first_altivec_reg_save
+ i
);
13596 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13597 GEN_INT (info
->altivec_save_offset
13598 + sp_offset
+ 16 * i
));
13599 rtx mem
= gen_rtx_MEM (V4SImode
, addr
);
13600 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13602 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, mem
, reg
);
13604 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
13606 rtx reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
13607 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13608 GEN_INT (info
->gp_save_offset
13609 + sp_offset
+ reg_size
* i
));
13610 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
13611 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13613 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, mem
, reg
);
13617 /* CR register traditionally saved as CR2. */
13618 rtx reg
= gen_rtx_REG (reg_mode
, CR2_REGNO
);
13619 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13620 GEN_INT (info
->cr_save_offset
13622 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
13623 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13625 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, mem
, reg
);
13627 /* Prevent any attempt to delete the setting of r0 and treg! */
13628 RTVEC_ELT (p
, j
++) = gen_rtx_USE (VOIDmode
, gen_rtx_REG (Pmode
, 0));
13629 RTVEC_ELT (p
, j
++) = gen_rtx_USE (VOIDmode
, treg
);
13630 RTVEC_ELT (p
, j
++) = gen_rtx_CLOBBER (VOIDmode
, sp_reg_rtx
);
13632 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
13633 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
13634 NULL_RTX
, NULL_RTX
);
13636 if (current_function_calls_eh_return
)
13641 unsigned int regno
= EH_RETURN_DATA_REGNO (i
);
13642 if (regno
== INVALID_REGNUM
)
13644 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, reg_mode
, regno
,
13645 info
->ehrd_offset
+ sp_offset
13646 + reg_size
* (int) i
,
13652 /* Save AltiVec registers if needed. */
13653 if (!WORLD_SAVE_P (info
) && TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
13657 /* There should be a non inline version of this, for when we
13658 are saving lots of vector registers. */
13659 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
13660 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
13662 rtx areg
, savereg
, mem
;
13665 offset
= info
->altivec_save_offset
+ sp_offset
13666 + 16 * (i
- info
->first_altivec_reg_save
);
13668 savereg
= gen_rtx_REG (V4SImode
, i
);
13670 areg
= gen_rtx_REG (Pmode
, 0);
13671 emit_move_insn (areg
, GEN_INT (offset
));
13673 /* AltiVec addressing mode is [reg+reg]. */
13674 mem
= gen_rtx_MEM (V4SImode
,
13675 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
));
13677 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13679 insn
= emit_move_insn (mem
, savereg
);
13681 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
13682 areg
, GEN_INT (offset
));
13686 /* VRSAVE is a bit vector representing which AltiVec registers
13687 are used. The OS uses this to determine which vector
13688 registers to save on a context switch. We need to save
13689 VRSAVE on the stack frame, add whatever AltiVec registers we
13690 used in this function, and do the corresponding magic in the
13693 if (TARGET_ALTIVEC
&& TARGET_ALTIVEC_VRSAVE
13694 && info
->vrsave_mask
!= 0)
13696 rtx reg
, mem
, vrsave
;
13699 /* Get VRSAVE onto a GPR. Note that ABI_V4 might be using r12
13700 as frame_reg_rtx and r11 as the static chain pointer for
13701 nested functions. */
13702 reg
= gen_rtx_REG (SImode
, 0);
13703 vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
13705 emit_insn (gen_get_vrsave_internal (reg
));
13707 emit_insn (gen_rtx_SET (VOIDmode
, reg
, vrsave
));
13709 if (!WORLD_SAVE_P (info
))
13712 offset
= info
->vrsave_save_offset
+ sp_offset
;
13714 = gen_rtx_MEM (SImode
,
13715 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, GEN_INT (offset
)));
13716 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13717 insn
= emit_move_insn (mem
, reg
);
13720 /* Include the registers in the mask. */
13721 emit_insn (gen_iorsi3 (reg
, reg
, GEN_INT ((int) info
->vrsave_mask
)));
13723 insn
= emit_insn (generate_set_vrsave (reg
, info
, 0));
13726 /* If we use the link register, get it into r0. */
13727 if (!WORLD_SAVE_P (info
) && info
->lr_save_p
)
13729 insn
= emit_move_insn (gen_rtx_REG (Pmode
, 0),
13730 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
13731 RTX_FRAME_RELATED_P (insn
) = 1;
13734 /* If we need to save CR, put it into r12. */
13735 if (!WORLD_SAVE_P (info
) && info
->cr_save_p
&& frame_reg_rtx
!= frame_ptr_rtx
)
13739 cr_save_rtx
= gen_rtx_REG (SImode
, 12);
13740 insn
= emit_insn (gen_movesi_from_cr (cr_save_rtx
));
13741 RTX_FRAME_RELATED_P (insn
) = 1;
13742 /* Now, there's no way that dwarf2out_frame_debug_expr is going
13743 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
13744 But that's OK. All we have to do is specify that _one_ condition
13745 code register is saved in this stack slot. The thrower's epilogue
13746 will then restore all the call-saved registers.
13747 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
13748 set
= gen_rtx_SET (VOIDmode
, cr_save_rtx
,
13749 gen_rtx_REG (SImode
, CR2_REGNO
));
13750 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
13755 /* Do any required saving of fpr's. If only one or two to save, do
13756 it ourselves. Otherwise, call function. */
13757 if (!WORLD_SAVE_P (info
) && saving_FPRs_inline
)
13760 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
13761 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
13762 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
13763 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, DFmode
,
13764 info
->first_fp_reg_save
+ i
,
13765 info
->fp_save_offset
+ sp_offset
+ 8 * i
,
13768 else if (!WORLD_SAVE_P (info
) && info
->first_fp_reg_save
!= 64)
13772 const char *alloc_rname
;
13774 p
= rtvec_alloc (2 + 64 - info
->first_fp_reg_save
);
13776 RTVEC_ELT (p
, 0) = gen_rtx_CLOBBER (VOIDmode
,
13777 gen_rtx_REG (Pmode
,
13778 LINK_REGISTER_REGNUM
));
13779 sprintf (rname
, "%s%d%s", SAVE_FP_PREFIX
,
13780 info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
);
13781 alloc_rname
= ggc_strdup (rname
);
13782 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
13783 gen_rtx_SYMBOL_REF (Pmode
,
13785 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
13787 rtx addr
, reg
, mem
;
13788 reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
13789 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13790 GEN_INT (info
->fp_save_offset
13791 + sp_offset
+ 8*i
));
13792 mem
= gen_rtx_MEM (DFmode
, addr
);
13793 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13795 RTVEC_ELT (p
, i
+ 2) = gen_rtx_SET (VOIDmode
, mem
, reg
);
13797 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
13798 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
13799 NULL_RTX
, NULL_RTX
);
13802 /* Save GPRs. This is done as a PARALLEL if we are using
13803 the store-multiple instructions. */
13804 if (!WORLD_SAVE_P (info
) && using_store_multiple
)
13808 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
13809 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
13811 rtx addr
, reg
, mem
;
13812 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
13813 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13814 GEN_INT (info
->gp_save_offset
13817 mem
= gen_rtx_MEM (reg_mode
, addr
);
13818 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13820 RTVEC_ELT (p
, i
) = gen_rtx_SET (VOIDmode
, mem
, reg
);
13822 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
13823 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
13824 NULL_RTX
, NULL_RTX
);
13826 else if (!WORLD_SAVE_P (info
))
13829 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
13830 if ((regs_ever_live
[info
->first_gp_reg_save
+ i
]
13831 && (!call_used_regs
[info
->first_gp_reg_save
+ i
]
13832 || (i
+ info
->first_gp_reg_save
13833 == RS6000_PIC_OFFSET_TABLE_REGNUM
13834 && TARGET_TOC
&& TARGET_MINIMAL_TOC
)))
13835 || (i
+ info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
13836 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
13837 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
13839 rtx addr
, reg
, mem
;
13840 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
13842 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
13844 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
13847 if (!SPE_CONST_OFFSET_OK (offset
))
13849 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
13850 emit_move_insn (b
, GEN_INT (offset
));
13853 b
= GEN_INT (offset
);
13855 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
13856 mem
= gen_rtx_MEM (V2SImode
, addr
);
13857 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13858 insn
= emit_move_insn (mem
, reg
);
13860 if (GET_CODE (b
) == CONST_INT
)
13861 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
13862 NULL_RTX
, NULL_RTX
);
13864 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
13865 b
, GEN_INT (offset
));
13869 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13870 GEN_INT (info
->gp_save_offset
13873 mem
= gen_rtx_MEM (reg_mode
, addr
);
13874 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13876 insn
= emit_move_insn (mem
, reg
);
13877 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
13878 NULL_RTX
, NULL_RTX
);
13883 /* ??? There's no need to emit actual instructions here, but it's the
13884 easiest way to get the frame unwind information emitted. */
13885 if (!WORLD_SAVE_P (info
) && current_function_calls_eh_return
)
13887 unsigned int i
, regno
;
13889 /* In AIX ABI we need to pretend we save r2 here. */
13892 rtx addr
, reg
, mem
;
13894 reg
= gen_rtx_REG (reg_mode
, 2);
13895 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13896 GEN_INT (sp_offset
+ 5 * reg_size
));
13897 mem
= gen_rtx_MEM (reg_mode
, addr
);
13898 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13900 insn
= emit_move_insn (mem
, reg
);
13901 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
13902 NULL_RTX
, NULL_RTX
);
13903 PATTERN (insn
) = gen_blockage ();
13908 regno
= EH_RETURN_DATA_REGNO (i
);
13909 if (regno
== INVALID_REGNUM
)
13912 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, reg_mode
, regno
,
13913 info
->ehrd_offset
+ sp_offset
13914 + reg_size
* (int) i
,
13919 /* Save lr if we used it. */
13920 if (!WORLD_SAVE_P (info
) && info
->lr_save_p
)
13922 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13923 GEN_INT (info
->lr_save_offset
+ sp_offset
));
13924 rtx reg
= gen_rtx_REG (Pmode
, 0);
13925 rtx mem
= gen_rtx_MEM (Pmode
, addr
);
13926 /* This should not be of rs6000_sr_alias_set, because of
13927 __builtin_return_address. */
13929 insn
= emit_move_insn (mem
, reg
);
13930 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
13931 NULL_RTX
, NULL_RTX
);
13934 /* Save CR if we use any that must be preserved. */
13935 if (!WORLD_SAVE_P (info
) && info
->cr_save_p
)
13937 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13938 GEN_INT (info
->cr_save_offset
+ sp_offset
));
13939 rtx mem
= gen_rtx_MEM (SImode
, addr
);
13940 /* See the large comment above about why CR2_REGNO is used. */
13941 rtx magic_eh_cr_reg
= gen_rtx_REG (SImode
, CR2_REGNO
);
13943 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13945 /* If r12 was used to hold the original sp, copy cr into r0 now
13947 if (REGNO (frame_reg_rtx
) == 12)
13951 cr_save_rtx
= gen_rtx_REG (SImode
, 0);
13952 insn
= emit_insn (gen_movesi_from_cr (cr_save_rtx
));
13953 RTX_FRAME_RELATED_P (insn
) = 1;
13954 set
= gen_rtx_SET (VOIDmode
, cr_save_rtx
, magic_eh_cr_reg
);
13955 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
13960 insn
= emit_move_insn (mem
, cr_save_rtx
);
13962 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
13963 NULL_RTX
, NULL_RTX
);
13966 /* Update stack and set back pointer unless this is V.4,
13967 for which it was done previously. */
13968 if (!WORLD_SAVE_P (info
) && info
->push_p
13969 && !(DEFAULT_ABI
== ABI_V4
|| current_function_calls_eh_return
))
13970 rs6000_emit_allocate_stack (info
->total_size
, FALSE
);
13972 /* Set frame pointer, if needed. */
13973 if (frame_pointer_needed
)
13975 insn
= emit_move_insn (gen_rtx_REG (Pmode
, HARD_FRAME_POINTER_REGNUM
),
13977 RTX_FRAME_RELATED_P (insn
) = 1;
13980 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
13981 if ((TARGET_TOC
&& TARGET_MINIMAL_TOC
&& get_pool_size () != 0)
13982 || (DEFAULT_ABI
== ABI_V4
13983 && (flag_pic
== 1 || (flag_pic
&& TARGET_SECURE_PLT
))
13984 && regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
]))
13986 /* If emit_load_toc_table will use the link register, we need to save
13987 it. We use R12 for this purpose because emit_load_toc_table
13988 can use register 0. This allows us to use a plain 'blr' to return
13989 from the procedure more often. */
13990 int save_LR_around_toc_setup
= (TARGET_ELF
13991 && DEFAULT_ABI
!= ABI_AIX
13993 && ! info
->lr_save_p
13994 && EDGE_COUNT (EXIT_BLOCK_PTR
->preds
) > 0);
13995 if (save_LR_around_toc_setup
)
13997 rtx lr
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
13999 insn
= emit_move_insn (frame_ptr_rtx
, lr
);
14000 rs6000_maybe_dead (insn
);
14001 RTX_FRAME_RELATED_P (insn
) = 1;
14003 rs6000_emit_load_toc_table (TRUE
);
14005 insn
= emit_move_insn (lr
, frame_ptr_rtx
);
14006 rs6000_maybe_dead (insn
);
14007 RTX_FRAME_RELATED_P (insn
) = 1;
14010 rs6000_emit_load_toc_table (TRUE
);
14014 if (DEFAULT_ABI
== ABI_DARWIN
14015 && flag_pic
&& current_function_uses_pic_offset_table
)
14017 rtx lr
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
14018 rtx src
= machopic_function_base_sym ();
14020 /* Save and restore LR locally around this call (in R0). */
14021 if (!info
->lr_save_p
)
14022 rs6000_maybe_dead (emit_move_insn (gen_rtx_REG (Pmode
, 0), lr
));
14024 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (lr
, src
)));
14026 insn
= emit_move_insn (gen_rtx_REG (Pmode
,
14027 RS6000_PIC_OFFSET_TABLE_REGNUM
),
14029 rs6000_maybe_dead (insn
);
14031 if (!info
->lr_save_p
)
14032 rs6000_maybe_dead (emit_move_insn (lr
, gen_rtx_REG (Pmode
, 0)));
14037 /* Write function prologue. */
/* NOTE(review): this chunk is a lossy text extraction -- the embedded
   original line numbers jump (14037, 14040, 14041, ...) and lines such as
   the return type, braces, and some declarations are missing.  The comments
   below annotate only what survives; the code text here is not compilable
   as shown.  */
14040 rs6000_output_function_prologue (FILE *file
,
14041 HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
/* Per-function frame layout (register-save offsets, sizes, flags),
   recomputed/cached by rs6000_stack_info ().  */
14043 rs6000_stack_t
*info
= rs6000_stack_info ();
/* Under -mdebug=stack, presumably dumps the computed frame layout for
   debugging -- confirm against debug_stack_info's definition.  */
14045 if (TARGET_DEBUG_STACK
)
14046 debug_stack_info (info
);
/* Emit .extern directives for the out-of-line FPR save/restore helper
   routines (SAVE_FP_PREFIX/RESTORE_FP_PREFIX + register number), used
   when FPRs below 64 are saved but not inline.  */
14048 /* Write .extern for any function we will call to save and restore
14050 if (info
->first_fp_reg_save
< 64
14051 && !FP_SAVE_INLINE (info
->first_fp_reg_save
))
14052 fprintf (file
, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
14053 SAVE_FP_PREFIX
, info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
,
14054 RESTORE_FP_PREFIX
, info
->first_fp_reg_save
- 32,
14055 RESTORE_FP_SUFFIX
);
/* The __mulh/__mull/__divss/__divus/__quoss/__quous externs are emitted at
   most once per translation unit: common_mode_defined latches to 1 below.  */
14057 /* Write .extern for AIX common mode routines, if needed. */
14058 if (! TARGET_POWER
&& ! TARGET_POWERPC
&& ! common_mode_defined
)
14060 fputs ("\t.extern __mulh\n", file
);
14061 fputs ("\t.extern __mull\n", file
);
14062 fputs ("\t.extern __divss\n", file
);
14063 fputs ("\t.extern __divus\n", file
);
14064 fputs ("\t.extern __quoss\n", file
);
14065 fputs ("\t.extern __quous\n", file
);
14066 common_mode_defined
= 1;
/* If the machine description provides no RTL prologue pattern, build the
   prologue here as insns (rs6000_emit_prologue) and run final () over them
   immediately, writing the assembly to FILE.  */
14069 if (! HAVE_prologue
)
14073 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
14074 the "toplevel" insn chain. */
14075 emit_note (NOTE_INSN_DELETED
);
14076 rs6000_emit_prologue ();
14077 emit_note (NOTE_INSN_DELETED
);
14079 /* Expand INSN_ADDRESSES so final() doesn't crash. */
/* NOTE(review): the declaration of `insn'/`addr' and the loop body braces
   were dropped by the extraction.  */
14083 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
14085 INSN_ADDRESSES_NEW (insn
, addr
);
14090 if (TARGET_DEBUG_STACK
)
14091 debug_rtx_list (get_insns (), 100);
14092 final (get_insns (), file
, FALSE
);
/* Presumably the counter behind per-function PIC base labels -- bumped once
   per prologue emitted; verify against uses of rs6000_pic_labelno.  */
14096 rs6000_pic_labelno
++;
14099 /* Emit function epilogue as insns.
14101 At present, dwarf2out_frame_debug_expr doesn't understand
14102 register restores, so we don't bother setting RTX_FRAME_RELATED_P
14103 anywhere in the epilogue. Most of the insns below would in any case
14104 need special notes to explain where r11 is in relation to the stack. */
14107 rs6000_emit_epilogue (int sibcall
)
14109 rs6000_stack_t
*info
;
14110 int restoring_FPRs_inline
;
14111 int using_load_multiple
;
14112 int using_mfcr_multiple
;
14113 int use_backchain_to_restore_sp
;
14115 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, 1);
14116 rtx frame_reg_rtx
= sp_reg_rtx
;
14117 enum machine_mode reg_mode
= Pmode
;
14118 int reg_size
= TARGET_32BIT
? 4 : 8;
14121 info
= rs6000_stack_info ();
14123 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
14125 reg_mode
= V2SImode
;
14129 using_load_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
14130 && (!TARGET_SPE_ABI
14131 || info
->spe_64bit_regs_used
== 0)
14132 && info
->first_gp_reg_save
< 31
14133 && no_global_regs_above (info
->first_gp_reg_save
));
14134 restoring_FPRs_inline
= (sibcall
14135 || current_function_calls_eh_return
14136 || info
->first_fp_reg_save
== 64
14137 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
14138 use_backchain_to_restore_sp
= (frame_pointer_needed
14139 || current_function_calls_alloca
14140 || info
->total_size
> 32767);
14141 using_mfcr_multiple
= (rs6000_cpu
== PROCESSOR_PPC601
14142 || rs6000_cpu
== PROCESSOR_PPC603
14143 || rs6000_cpu
== PROCESSOR_PPC750
14146 if (WORLD_SAVE_P (info
))
14150 const char *alloc_rname
;
14153 /* eh_rest_world_r10 will return to the location saved in the LR
14154 stack slot (which is not likely to be our caller.)
14155 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
14156 rest_world is similar, except any R10 parameter is ignored.
14157 The exception-handling stuff that was here in 2.95 is no
14158 longer necessary. */
14162 + 32 - info
->first_gp_reg_save
14163 + LAST_ALTIVEC_REGNO
+ 1 - info
->first_altivec_reg_save
14164 + 63 + 1 - info
->first_fp_reg_save
);
14166 strcpy (rname
, ((current_function_calls_eh_return
) ?
14167 "*eh_rest_world_r10" : "*rest_world"));
14168 alloc_rname
= ggc_strdup (rname
);
14171 RTVEC_ELT (p
, j
++) = gen_rtx_RETURN (VOIDmode
);
14172 RTVEC_ELT (p
, j
++) = gen_rtx_USE (VOIDmode
,
14173 gen_rtx_REG (Pmode
,
14174 LINK_REGISTER_REGNUM
));
14176 = gen_rtx_USE (VOIDmode
, gen_rtx_SYMBOL_REF (Pmode
, alloc_rname
));
14177 /* The instruction pattern requires a clobber here;
14178 it is shared with the restVEC helper. */
14180 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 11));
14183 /* CR register traditionally saved as CR2. */
14184 rtx reg
= gen_rtx_REG (reg_mode
, CR2_REGNO
);
14185 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
14186 GEN_INT (info
->cr_save_offset
));
14187 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
14188 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
14190 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, reg
, mem
);
14193 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
14195 rtx reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
14196 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
14197 GEN_INT (info
->gp_save_offset
14199 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
14200 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
14202 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, reg
, mem
);
14204 for (i
= 0; info
->first_altivec_reg_save
+ i
<= LAST_ALTIVEC_REGNO
; i
++)
14206 rtx reg
= gen_rtx_REG (V4SImode
, info
->first_altivec_reg_save
+ i
);
14207 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
14208 GEN_INT (info
->altivec_save_offset
14210 rtx mem
= gen_rtx_MEM (V4SImode
, addr
);
14211 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
14213 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, reg
, mem
);
14215 for (i
= 0; info
->first_fp_reg_save
+ i
<= 63; i
++)
14217 rtx reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
14218 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
14219 GEN_INT (info
->fp_save_offset
14221 rtx mem
= gen_rtx_MEM (DFmode
, addr
);
14222 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
14224 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, reg
, mem
);
14227 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 0));
14229 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (SImode
, 12));
14231 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (SImode
, 7));
14233 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (SImode
, 8));
14235 = gen_rtx_USE (VOIDmode
, gen_rtx_REG (SImode
, 10));
14236 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
14241 /* If we have a frame pointer, a call to alloca, or a large stack
14242 frame, restore the old stack pointer using the backchain. Otherwise,
14243 we know what size to update it with. */
14244 if (use_backchain_to_restore_sp
)
14246 /* Under V.4, don't reset the stack pointer until after we're done
14247 loading the saved registers. */
14248 if (DEFAULT_ABI
== ABI_V4
)
14249 frame_reg_rtx
= gen_rtx_REG (Pmode
, 11);
14251 emit_move_insn (frame_reg_rtx
,
14252 gen_rtx_MEM (Pmode
, sp_reg_rtx
));
14255 else if (info
->push_p
)
14257 if (DEFAULT_ABI
== ABI_V4
14258 || current_function_calls_eh_return
)
14259 sp_offset
= info
->total_size
;
14262 emit_insn (TARGET_32BIT
14263 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
14264 GEN_INT (info
->total_size
))
14265 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
14266 GEN_INT (info
->total_size
)));
14270 /* Restore AltiVec registers if needed. */
14271 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
14275 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
14276 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
14278 rtx addr
, areg
, mem
;
14280 areg
= gen_rtx_REG (Pmode
, 0);
14282 (areg
, GEN_INT (info
->altivec_save_offset
14284 + 16 * (i
- info
->first_altivec_reg_save
)));
14286 /* AltiVec addressing mode is [reg+reg]. */
14287 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
);
14288 mem
= gen_rtx_MEM (V4SImode
, addr
);
14289 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
14291 emit_move_insn (gen_rtx_REG (V4SImode
, i
), mem
);
14295 /* Restore VRSAVE if needed. */
14296 if (TARGET_ALTIVEC
&& TARGET_ALTIVEC_VRSAVE
14297 && info
->vrsave_mask
!= 0)
14299 rtx addr
, mem
, reg
;
14301 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
14302 GEN_INT (info
->vrsave_save_offset
+ sp_offset
));
14303 mem
= gen_rtx_MEM (SImode
, addr
);
14304 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
14305 reg
= gen_rtx_REG (SImode
, 12);
14306 emit_move_insn (reg
, mem
);
14308 emit_insn (generate_set_vrsave (reg
, info
, 1));
14311 /* Get the old lr if we saved it. */
14312 if (info
->lr_save_p
)
14314 rtx mem
= gen_frame_mem_offset (Pmode
, frame_reg_rtx
,
14315 info
->lr_save_offset
+ sp_offset
);
14317 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
14319 emit_move_insn (gen_rtx_REG (Pmode
, 0), mem
);
14322 /* Get the old cr if we saved it. */
14323 if (info
->cr_save_p
)
14325 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
14326 GEN_INT (info
->cr_save_offset
+ sp_offset
));
14327 rtx mem
= gen_rtx_MEM (SImode
, addr
);
14329 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
14331 emit_move_insn (gen_rtx_REG (SImode
, 12), mem
);
14334 /* Set LR here to try to overlap restores below. */
14335 if (info
->lr_save_p
)
14336 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
14337 gen_rtx_REG (Pmode
, 0));
14339 /* Load exception handler data registers, if needed. */
14340 if (current_function_calls_eh_return
)
14342 unsigned int i
, regno
;
14346 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
14347 GEN_INT (sp_offset
+ 5 * reg_size
));
14348 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
14350 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
14352 emit_move_insn (gen_rtx_REG (reg_mode
, 2), mem
);
14359 regno
= EH_RETURN_DATA_REGNO (i
);
14360 if (regno
== INVALID_REGNUM
)
14363 mem
= gen_frame_mem_offset (reg_mode
, frame_reg_rtx
,
14364 info
->ehrd_offset
+ sp_offset
14365 + reg_size
* (int) i
);
14366 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
14368 emit_move_insn (gen_rtx_REG (reg_mode
, regno
), mem
);
14372 /* Restore GPRs. This is done as a PARALLEL if we are using
14373 the load-multiple instructions. */
14374 if (using_load_multiple
)
14377 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
14378 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
14380 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
14381 GEN_INT (info
->gp_save_offset
14384 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
14386 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
14389 gen_rtx_SET (VOIDmode
,
14390 gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
),
14393 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
14396 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
14397 if ((regs_ever_live
[info
->first_gp_reg_save
+ i
]
14398 && (!call_used_regs
[info
->first_gp_reg_save
+ i
]
14399 || (i
+ info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
14400 && TARGET_TOC
&& TARGET_MINIMAL_TOC
)))
14401 || (i
+ info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
14402 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
14403 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
14405 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
14406 GEN_INT (info
->gp_save_offset
14409 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
14411 /* Restore 64-bit quantities for SPE. */
14412 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
14414 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
14417 if (!SPE_CONST_OFFSET_OK (offset
))
14419 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
14420 emit_move_insn (b
, GEN_INT (offset
));
14423 b
= GEN_INT (offset
);
14425 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
14426 mem
= gen_rtx_MEM (V2SImode
, addr
);
14429 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
14431 emit_move_insn (gen_rtx_REG (reg_mode
,
14432 info
->first_gp_reg_save
+ i
), mem
);
14435 /* Restore fpr's if we need to do it without calling a function. */
14436 if (restoring_FPRs_inline
)
14437 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
14438 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
14439 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
14442 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
14443 GEN_INT (info
->fp_save_offset
14446 mem
= gen_rtx_MEM (DFmode
, addr
);
14447 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
14449 emit_move_insn (gen_rtx_REG (DFmode
,
14450 info
->first_fp_reg_save
+ i
),
14454 /* If we saved cr, restore it here. Just those that were used. */
14455 if (info
->cr_save_p
)
14457 rtx r12_rtx
= gen_rtx_REG (SImode
, 12);
14460 if (using_mfcr_multiple
)
14462 for (i
= 0; i
< 8; i
++)
14463 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
14465 gcc_assert (count
);
14468 if (using_mfcr_multiple
&& count
> 1)
14473 p
= rtvec_alloc (count
);
14476 for (i
= 0; i
< 8; i
++)
14477 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
14479 rtvec r
= rtvec_alloc (2);
14480 RTVEC_ELT (r
, 0) = r12_rtx
;
14481 RTVEC_ELT (r
, 1) = GEN_INT (1 << (7-i
));
14482 RTVEC_ELT (p
, ndx
) =
14483 gen_rtx_SET (VOIDmode
, gen_rtx_REG (CCmode
, CR0_REGNO
+i
),
14484 gen_rtx_UNSPEC (CCmode
, r
, UNSPEC_MOVESI_TO_CR
));
14487 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
14488 gcc_assert (ndx
== count
);
14491 for (i
= 0; i
< 8; i
++)
14492 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
14494 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode
,
14500 /* If this is V.4, unwind the stack pointer after all of the loads
14501 have been done. We need to emit a block here so that sched
14502 doesn't decide to move the sp change before the register restores
14503 (which may not have any obvious dependency on the stack). This
14504 doesn't hurt performance, because there is no scheduling that can
14505 be done after this point. */
14506 if (DEFAULT_ABI
== ABI_V4
14507 || current_function_calls_eh_return
)
14509 if (frame_reg_rtx
!= sp_reg_rtx
)
14510 rs6000_emit_stack_tie ();
14512 if (use_backchain_to_restore_sp
)
14514 emit_move_insn (sp_reg_rtx
, frame_reg_rtx
);
14516 else if (sp_offset
!= 0)
14518 emit_insn (TARGET_32BIT
14519 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
14520 GEN_INT (sp_offset
))
14521 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
14522 GEN_INT (sp_offset
)));
14526 if (current_function_calls_eh_return
)
14528 rtx sa
= EH_RETURN_STACKADJ_RTX
;
14529 emit_insn (TARGET_32BIT
14530 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
, sa
)
14531 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
, sa
));
14537 if (! restoring_FPRs_inline
)
14538 p
= rtvec_alloc (3 + 64 - info
->first_fp_reg_save
);
14540 p
= rtvec_alloc (2);
14542 RTVEC_ELT (p
, 0) = gen_rtx_RETURN (VOIDmode
);
14543 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
14544 gen_rtx_REG (Pmode
,
14545 LINK_REGISTER_REGNUM
));
14547 /* If we have to restore more than two FP registers, branch to the
14548 restore function. It will return to our caller. */
14549 if (! restoring_FPRs_inline
)
14553 const char *alloc_rname
;
14555 sprintf (rname
, "%s%d%s", RESTORE_FP_PREFIX
,
14556 info
->first_fp_reg_save
- 32, RESTORE_FP_SUFFIX
);
14557 alloc_rname
= ggc_strdup (rname
);
14558 RTVEC_ELT (p
, 2) = gen_rtx_USE (VOIDmode
,
14559 gen_rtx_SYMBOL_REF (Pmode
,
14562 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
14565 addr
= gen_rtx_PLUS (Pmode
, sp_reg_rtx
,
14566 GEN_INT (info
->fp_save_offset
+ 8*i
));
14567 mem
= gen_rtx_MEM (DFmode
, addr
);
14568 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
14570 RTVEC_ELT (p
, i
+3) =
14571 gen_rtx_SET (VOIDmode
,
14572 gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
),
14577 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
14581 /* Write function epilogue. */
14584 rs6000_output_function_epilogue (FILE *file
,
14585 HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
14587 rs6000_stack_t
*info
= rs6000_stack_info ();
14589 if (! HAVE_epilogue
)
14591 rtx insn
= get_last_insn ();
14592 /* If the last insn was a BARRIER, we don't have to write anything except
14593 the trace table. */
14594 if (GET_CODE (insn
) == NOTE
)
14595 insn
= prev_nonnote_insn (insn
);
14596 if (insn
== 0 || GET_CODE (insn
) != BARRIER
)
14598 /* This is slightly ugly, but at least we don't have two
14599 copies of the epilogue-emitting code. */
14602 /* A NOTE_INSN_DELETED is supposed to be at the start
14603 and end of the "toplevel" insn chain. */
14604 emit_note (NOTE_INSN_DELETED
);
14605 rs6000_emit_epilogue (FALSE
);
14606 emit_note (NOTE_INSN_DELETED
);
14608 /* Expand INSN_ADDRESSES so final() doesn't crash. */
14612 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
14614 INSN_ADDRESSES_NEW (insn
, addr
);
14619 if (TARGET_DEBUG_STACK
)
14620 debug_rtx_list (get_insns (), 100);
14621 final (get_insns (), file
, FALSE
);
14627 macho_branch_islands ();
14628 /* Mach-O doesn't support labels at the end of objects, so if
14629 it looks like we might want one, insert a NOP. */
14631 rtx insn
= get_last_insn ();
14634 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_DELETED_LABEL
)
14635 insn
= PREV_INSN (insn
);
14639 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_DELETED_LABEL
)))
14640 fputs ("\tnop\n", file
);
14644 /* Output a traceback table here. See /usr/include/sys/debug.h for info
14647 We don't output a traceback table if -finhibit-size-directive was
14648 used. The documentation for -finhibit-size-directive reads
14649 ``don't output a @code{.size} assembler directive, or anything
14650 else that would cause trouble if the function is split in the
14651 middle, and the two halves are placed at locations far apart in
14652 memory.'' The traceback table has this property, since it
14653 includes the offset from the start of the function to the
14654 traceback table itself.
14656 System V.4 Powerpc's (and the embedded ABI derived from it) use a
14657 different traceback table. */
14658 if (DEFAULT_ABI
== ABI_AIX
&& ! flag_inhibit_size_directive
14659 && rs6000_traceback
!= traceback_none
)
14661 const char *fname
= NULL
;
14662 const char *language_string
= lang_hooks
.name
;
14663 int fixed_parms
= 0, float_parms
= 0, parm_info
= 0;
14665 int optional_tbtab
;
14667 if (rs6000_traceback
== traceback_full
)
14668 optional_tbtab
= 1;
14669 else if (rs6000_traceback
== traceback_part
)
14670 optional_tbtab
= 0;
14672 optional_tbtab
= !optimize_size
&& !TARGET_ELF
;
14674 if (optional_tbtab
)
14676 fname
= XSTR (XEXP (DECL_RTL (current_function_decl
), 0), 0);
14677 while (*fname
== '.') /* V.4 encodes . in the name */
14680 /* Need label immediately before tbtab, so we can compute
14681 its offset from the function start. */
14682 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
14683 ASM_OUTPUT_LABEL (file
, fname
);
14686 /* The .tbtab pseudo-op can only be used for the first eight
14687 expressions, since it can't handle the possibly variable
14688 length fields that follow. However, if you omit the optional
14689 fields, the assembler outputs zeros for all optional fields
14690 anyways, giving each variable length field is minimum length
14691 (as defined in sys/debug.h). Thus we can not use the .tbtab
14692 pseudo-op at all. */
14694 /* An all-zero word flags the start of the tbtab, for debuggers
14695 that have to find it by searching forward from the entry
14696 point or from the current pc. */
14697 fputs ("\t.long 0\n", file
);
14699 /* Tbtab format type. Use format type 0. */
14700 fputs ("\t.byte 0,", file
);
14702 /* Language type. Unfortunately, there does not seem to be any
14703 official way to discover the language being compiled, so we
14704 use language_string.
14705 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
14706 Java is 13. Objective-C is 14. */
14707 if (! strcmp (language_string
, "GNU C"))
14709 else if (! strcmp (language_string
, "GNU F77")
14710 || ! strcmp (language_string
, "GNU F95"))
14712 else if (! strcmp (language_string
, "GNU Pascal"))
14714 else if (! strcmp (language_string
, "GNU Ada"))
14716 else if (! strcmp (language_string
, "GNU C++"))
14718 else if (! strcmp (language_string
, "GNU Java"))
14720 else if (! strcmp (language_string
, "GNU Objective-C"))
14723 gcc_unreachable ();
14724 fprintf (file
, "%d,", i
);
14726 /* 8 single bit fields: global linkage (not set for C extern linkage,
14727 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
14728 from start of procedure stored in tbtab, internal function, function
14729 has controlled storage, function has no toc, function uses fp,
14730 function logs/aborts fp operations. */
14731 /* Assume that fp operations are used if any fp reg must be saved. */
14732 fprintf (file
, "%d,",
14733 (optional_tbtab
<< 5) | ((info
->first_fp_reg_save
!= 64) << 1));
14735 /* 6 bitfields: function is interrupt handler, name present in
14736 proc table, function calls alloca, on condition directives
14737 (controls stack walks, 3 bits), saves condition reg, saves
14739 /* The `function calls alloca' bit seems to be set whenever reg 31 is
14740 set up as a frame pointer, even when there is no alloca call. */
14741 fprintf (file
, "%d,",
14742 ((optional_tbtab
<< 6)
14743 | ((optional_tbtab
& frame_pointer_needed
) << 5)
14744 | (info
->cr_save_p
<< 1)
14745 | (info
->lr_save_p
)));
14747 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
14749 fprintf (file
, "%d,",
14750 (info
->push_p
<< 7) | (64 - info
->first_fp_reg_save
));
14752 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
14753 fprintf (file
, "%d,", (32 - first_reg_to_save ()));
14755 if (optional_tbtab
)
14757 /* Compute the parameter info from the function decl argument
14760 int next_parm_info_bit
= 31;
14762 for (decl
= DECL_ARGUMENTS (current_function_decl
);
14763 decl
; decl
= TREE_CHAIN (decl
))
14765 rtx parameter
= DECL_INCOMING_RTL (decl
);
14766 enum machine_mode mode
= GET_MODE (parameter
);
14768 if (GET_CODE (parameter
) == REG
)
14770 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
14788 gcc_unreachable ();
14791 /* If only one bit will fit, don't or in this entry. */
14792 if (next_parm_info_bit
> 0)
14793 parm_info
|= (bits
<< (next_parm_info_bit
- 1));
14794 next_parm_info_bit
-= 2;
14798 fixed_parms
+= ((GET_MODE_SIZE (mode
)
14799 + (UNITS_PER_WORD
- 1))
14801 next_parm_info_bit
-= 1;
14807 /* Number of fixed point parameters. */
14808 /* This is actually the number of words of fixed point parameters; thus
14809 an 8 byte struct counts as 2; and thus the maximum value is 8. */
14810 fprintf (file
, "%d,", fixed_parms
);
14812 /* 2 bitfields: number of floating point parameters (7 bits), parameters
14814 /* This is actually the number of fp registers that hold parameters;
14815 and thus the maximum value is 13. */
14816 /* Set parameters on stack bit if parameters are not in their original
14817 registers, regardless of whether they are on the stack? Xlc
14818 seems to set the bit when not optimizing. */
14819 fprintf (file
, "%d\n", ((float_parms
<< 1) | (! optimize
)));
14821 if (! optional_tbtab
)
14824 /* Optional fields follow. Some are variable length. */
14826 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
14827 11 double float. */
14828 /* There is an entry for each parameter in a register, in the order that
14829 they occur in the parameter list. Any intervening arguments on the
14830 stack are ignored. If the list overflows a long (max possible length
14831 34 bits) then completely leave off all elements that don't fit. */
14832 /* Only emit this long if there was at least one parameter. */
14833 if (fixed_parms
|| float_parms
)
14834 fprintf (file
, "\t.long %d\n", parm_info
);
14836 /* Offset from start of code to tb table. */
14837 fputs ("\t.long ", file
);
14838 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
14840 RS6000_OUTPUT_BASENAME (file
, fname
);
14842 assemble_name (file
, fname
);
14844 rs6000_output_function_entry (file
, fname
);
14847 /* Interrupt handler mask. */
14848 /* Omit this long, since we never set the interrupt handler bit
14851 /* Number of CTL (controlled storage) anchors. */
14852 /* Omit this long, since the has_ctl bit is never set above. */
14854 /* Displacement into stack of each CTL anchor. */
14855 /* Omit this list of longs, because there are no CTL anchors. */
14857 /* Length of function name. */
14860 fprintf (file
, "\t.short %d\n", (int) strlen (fname
));
14862 /* Function name. */
14863 assemble_string (fname
, strlen (fname
));
14865 /* Register for alloca automatic storage; this is always reg 31.
14866 Only emit this if the alloca bit was set above. */
14867 if (frame_pointer_needed
)
14868 fputs ("\t.byte 31\n", file
);
14870 fputs ("\t.align 2\n", file
);
14874 /* A C compound statement that outputs the assembler code for a thunk
14875 function, used to implement C++ virtual function calls with
14876 multiple inheritance. The thunk acts as a wrapper around a virtual
14877 function, adjusting the implicit object parameter before handing
14878 control off to the real function.
14880 First, emit code to add the integer DELTA to the location that
14881 contains the incoming first argument. Assume that this argument
14882 contains a pointer, and is the one used to pass the `this' pointer
14883 in C++. This is the incoming argument *before* the function
14884 prologue, e.g. `%o0' on a sparc. The addition must preserve the
14885 values of all other incoming arguments.
14887 After the addition, emit code to jump to FUNCTION, which is a
14888 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
14889 not touch the return address. Hence returning from FUNCTION will
14890 return to whoever called the current `thunk'.
14892 The effect must be as if FUNCTION had been called directly with the
14893 adjusted first argument. This macro is responsible for emitting
14894 all of the code for a thunk function; output_function_prologue()
14895 and output_function_epilogue() are not invoked.
14897 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
14898 been extracted from it.) It might possibly be useful on some
14899 targets, but probably not.
14901 If you do not define this macro, the target-independent code in the
14902 C++ frontend will generate a less efficient heavyweight thunk that
14903 calls FUNCTION instead of jumping to it. The generic approach does
14904 not support varargs. */
14907 rs6000_output_mi_thunk (FILE *file
, tree thunk_fndecl ATTRIBUTE_UNUSED
,
14908 HOST_WIDE_INT delta
, HOST_WIDE_INT vcall_offset
,
14911 rtx
this, insn
, funexp
;
14913 reload_completed
= 1;
14914 epilogue_completed
= 1;
14915 no_new_pseudos
= 1;
14916 reset_block_changes ();
14918 /* Mark the end of the (empty) prologue. */
14919 emit_note (NOTE_INSN_PROLOGUE_END
);
14921 /* Find the "this" pointer. If the function returns a structure,
14922 the structure return pointer is in r3. */
14923 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
14924 this = gen_rtx_REG (Pmode
, 4);
14926 this = gen_rtx_REG (Pmode
, 3);
14928 /* Apply the constant offset, if required. */
14931 rtx delta_rtx
= GEN_INT (delta
);
14932 emit_insn (TARGET_32BIT
14933 ? gen_addsi3 (this, this, delta_rtx
)
14934 : gen_adddi3 (this, this, delta_rtx
));
14937 /* Apply the offset from the vtable, if required. */
14940 rtx vcall_offset_rtx
= GEN_INT (vcall_offset
);
14941 rtx tmp
= gen_rtx_REG (Pmode
, 12);
14943 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, this));
14944 if (((unsigned HOST_WIDE_INT
) vcall_offset
) + 0x8000 >= 0x10000)
14946 emit_insn (TARGET_32BIT
14947 ? gen_addsi3 (tmp
, tmp
, vcall_offset_rtx
)
14948 : gen_adddi3 (tmp
, tmp
, vcall_offset_rtx
));
14949 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, tmp
));
14953 rtx loc
= gen_rtx_PLUS (Pmode
, tmp
, vcall_offset_rtx
);
14955 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, loc
));
14957 emit_insn (TARGET_32BIT
14958 ? gen_addsi3 (this, this, tmp
)
14959 : gen_adddi3 (this, this, tmp
));
14962 /* Generate a tail call to the target function. */
14963 if (!TREE_USED (function
))
14965 assemble_external (function
);
14966 TREE_USED (function
) = 1;
14968 funexp
= XEXP (DECL_RTL (function
), 0);
14969 funexp
= gen_rtx_MEM (FUNCTION_MODE
, funexp
);
14972 if (MACHOPIC_INDIRECT
)
14973 funexp
= machopic_indirect_call_target (funexp
);
14976 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
14977 generate sibcall RTL explicitly. */
14978 insn
= emit_call_insn (
14979 gen_rtx_PARALLEL (VOIDmode
,
14981 gen_rtx_CALL (VOIDmode
,
14982 funexp
, const0_rtx
),
14983 gen_rtx_USE (VOIDmode
, const0_rtx
),
14984 gen_rtx_USE (VOIDmode
,
14985 gen_rtx_REG (SImode
,
14986 LINK_REGISTER_REGNUM
)),
14987 gen_rtx_RETURN (VOIDmode
))));
14988 SIBLING_CALL_P (insn
) = 1;
14991 /* Run just enough of rest_of_compilation to get the insns emitted.
14992 There's not really enough bulk here to make other passes such as
14993 instruction scheduling worth while. Note that use_thunk calls
14994 assemble_start_function and assemble_end_function. */
14995 insn
= get_insns ();
14996 insn_locators_initialize ();
14997 shorten_branches (insn
);
14998 final_start_function (insn
, file
, 1);
14999 final (insn
, file
, 1);
15000 final_end_function ();
15002 reload_completed
= 0;
15003 epilogue_completed
= 0;
15004 no_new_pseudos
= 0;
15007 /* A quick summary of the various types of 'constant-pool tables'
15010 Target Flags Name One table per
15011 AIX (none) AIX TOC object file
15012 AIX -mfull-toc AIX TOC object file
15013 AIX -mminimal-toc AIX minimal TOC translation unit
15014 SVR4/EABI (none) SVR4 SDATA object file
15015 SVR4/EABI -fpic SVR4 pic object file
15016 SVR4/EABI -fPIC SVR4 PIC translation unit
15017 SVR4/EABI -mrelocatable EABI TOC function
15018 SVR4/EABI -maix AIX TOC object file
15019 SVR4/EABI -maix -mminimal-toc
15020 AIX minimal TOC translation unit
15022 Name Reg. Set by entries contains:
15023 made by addrs? fp? sum?
15025 AIX TOC 2 crt0 as Y option option
15026 AIX minimal TOC 30 prolog gcc Y Y option
15027 SVR4 SDATA 13 crt0 gcc N Y N
15028 SVR4 pic 30 prolog ld Y not yet N
15029 SVR4 PIC 30 prolog gcc Y option option
15030 EABI TOC 30 prolog gcc Y option option
15034 /* Hash functions for the hash table. */
15037 rs6000_hash_constant (rtx k
)
15039 enum rtx_code code
= GET_CODE (k
);
15040 enum machine_mode mode
= GET_MODE (k
);
15041 unsigned result
= (code
<< 3) ^ mode
;
15042 const char *format
;
15045 format
= GET_RTX_FORMAT (code
);
15046 flen
= strlen (format
);
15052 return result
* 1231 + (unsigned) INSN_UID (XEXP (k
, 0));
15055 if (mode
!= VOIDmode
)
15056 return real_hash (CONST_DOUBLE_REAL_VALUE (k
)) * result
;
15068 for (; fidx
< flen
; fidx
++)
15069 switch (format
[fidx
])
15074 const char *str
= XSTR (k
, fidx
);
15075 len
= strlen (str
);
15076 result
= result
* 613 + len
;
15077 for (i
= 0; i
< len
; i
++)
15078 result
= result
* 613 + (unsigned) str
[i
];
15083 result
= result
* 1231 + rs6000_hash_constant (XEXP (k
, fidx
));
15087 result
= result
* 613 + (unsigned) XINT (k
, fidx
);
15090 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT
))
15091 result
= result
* 613 + (unsigned) XWINT (k
, fidx
);
15095 for (i
= 0; i
< sizeof (HOST_WIDE_INT
) / sizeof (unsigned); i
++)
15096 result
= result
* 613 + (unsigned) (XWINT (k
, fidx
)
15103 gcc_unreachable ();
15110 toc_hash_function (const void *hash_entry
)
15112 const struct toc_hash_struct
*thc
=
15113 (const struct toc_hash_struct
*) hash_entry
;
15114 return rs6000_hash_constant (thc
->key
) ^ thc
->key_mode
;
15117 /* Compare H1 and H2 for equivalence. */
15120 toc_hash_eq (const void *h1
, const void *h2
)
15122 rtx r1
= ((const struct toc_hash_struct
*) h1
)->key
;
15123 rtx r2
= ((const struct toc_hash_struct
*) h2
)->key
;
15125 if (((const struct toc_hash_struct
*) h1
)->key_mode
15126 != ((const struct toc_hash_struct
*) h2
)->key_mode
)
15129 return rtx_equal_p (r1
, r2
);
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.

   NOTE(review): the expansion refers to the local variable `name', not
   the macro argument NAME -- every caller passes a variable literally
   called `name', so this works, but the argument is effectively
   ignored.  */

#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", name, strlen ("_vt.")) == 0		\
   || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0	\
   || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0	\
   || strncmp ("_ZTI", name, strlen ("_ZTI")) == 0	\
   || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
15145 rs6000_output_symbol_ref (FILE *file
, rtx x
)
15147 /* Currently C++ toc references to vtables can be emitted before it
15148 is decided whether the vtable is public or private. If this is
15149 the case, then the linker will eventually complain that there is
15150 a reference to an unknown section. Thus, for vtables only,
15151 we emit the TOC reference to reference the symbol and not the
15153 const char *name
= XSTR (x
, 0);
15155 if (VTABLE_NAME_P (name
))
15157 RS6000_OUTPUT_BASENAME (file
, name
);
15160 assemble_name (file
, name
);
15163 /* Output a TOC entry. We derive the entry name from what is being
15167 output_toc (FILE *file
, rtx x
, int labelno
, enum machine_mode mode
)
15170 const char *name
= buf
;
15171 const char *real_name
;
15175 gcc_assert (!TARGET_NO_TOC
);
15177 /* When the linker won't eliminate them, don't output duplicate
15178 TOC entries (this happens on AIX if there is any kind of TOC,
15179 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
15181 if (TARGET_TOC
&& GET_CODE (x
) != LABEL_REF
)
15183 struct toc_hash_struct
*h
;
15186 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
15187 time because GGC is not initialized at that point. */
15188 if (toc_hash_table
== NULL
)
15189 toc_hash_table
= htab_create_ggc (1021, toc_hash_function
,
15190 toc_hash_eq
, NULL
);
15192 h
= ggc_alloc (sizeof (*h
));
15194 h
->key_mode
= mode
;
15195 h
->labelno
= labelno
;
15197 found
= htab_find_slot (toc_hash_table
, h
, 1);
15198 if (*found
== NULL
)
15200 else /* This is indeed a duplicate.
15201 Set this label equal to that label. */
15203 fputs ("\t.set ", file
);
15204 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
15205 fprintf (file
, "%d,", labelno
);
15206 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
15207 fprintf (file
, "%d\n", ((*(const struct toc_hash_struct
**)
15213 /* If we're going to put a double constant in the TOC, make sure it's
15214 aligned properly when strict alignment is on. */
15215 if (GET_CODE (x
) == CONST_DOUBLE
15216 && STRICT_ALIGNMENT
15217 && GET_MODE_BITSIZE (mode
) >= 64
15218 && ! (TARGET_NO_FP_IN_TOC
&& ! TARGET_MINIMAL_TOC
)) {
15219 ASM_OUTPUT_ALIGN (file
, 3);
15222 (*targetm
.asm_out
.internal_label
) (file
, "LC", labelno
);
15224 /* Handle FP constants specially. Note that if we have a minimal
15225 TOC, things we put here aren't actually in the TOC, so we can allow
15227 if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == TFmode
)
15229 REAL_VALUE_TYPE rv
;
15232 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
15233 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
15237 if (TARGET_MINIMAL_TOC
)
15238 fputs (DOUBLE_INT_ASM_OP
, file
);
15240 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
15241 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
15242 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
15243 fprintf (file
, "0x%lx%08lx,0x%lx%08lx\n",
15244 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
15245 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
15250 if (TARGET_MINIMAL_TOC
)
15251 fputs ("\t.long ", file
);
15253 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
15254 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
15255 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
15256 fprintf (file
, "0x%lx,0x%lx,0x%lx,0x%lx\n",
15257 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
15258 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
15262 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
15264 REAL_VALUE_TYPE rv
;
15267 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
15268 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
15272 if (TARGET_MINIMAL_TOC
)
15273 fputs (DOUBLE_INT_ASM_OP
, file
);
15275 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
15276 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
15277 fprintf (file
, "0x%lx%08lx\n",
15278 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
15283 if (TARGET_MINIMAL_TOC
)
15284 fputs ("\t.long ", file
);
15286 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
15287 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
15288 fprintf (file
, "0x%lx,0x%lx\n",
15289 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
15293 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
15295 REAL_VALUE_TYPE rv
;
15298 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
15299 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
15303 if (TARGET_MINIMAL_TOC
)
15304 fputs (DOUBLE_INT_ASM_OP
, file
);
15306 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
15307 fprintf (file
, "0x%lx00000000\n", l
& 0xffffffff);
15312 if (TARGET_MINIMAL_TOC
)
15313 fputs ("\t.long ", file
);
15315 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
15316 fprintf (file
, "0x%lx\n", l
& 0xffffffff);
15320 else if (GET_MODE (x
) == VOIDmode
15321 && (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
))
15323 unsigned HOST_WIDE_INT low
;
15324 HOST_WIDE_INT high
;
15326 if (GET_CODE (x
) == CONST_DOUBLE
)
15328 low
= CONST_DOUBLE_LOW (x
);
15329 high
= CONST_DOUBLE_HIGH (x
);
15332 #if HOST_BITS_PER_WIDE_INT == 32
15335 high
= (low
& 0x80000000) ? ~0 : 0;
15339 low
= INTVAL (x
) & 0xffffffff;
15340 high
= (HOST_WIDE_INT
) INTVAL (x
) >> 32;
15344 /* TOC entries are always Pmode-sized, but since this
15345 is a bigendian machine then if we're putting smaller
15346 integer constants in the TOC we have to pad them.
15347 (This is still a win over putting the constants in
15348 a separate constant pool, because then we'd have
15349 to have both a TOC entry _and_ the actual constant.)
15351 For a 32-bit target, CONST_INT values are loaded and shifted
15352 entirely within `low' and can be stored in one TOC entry. */
15354 /* It would be easy to make this work, but it doesn't now. */
15355 gcc_assert (!TARGET_64BIT
|| POINTER_SIZE
>= GET_MODE_BITSIZE (mode
));
15357 if (POINTER_SIZE
> GET_MODE_BITSIZE (mode
))
15359 #if HOST_BITS_PER_WIDE_INT == 32
15360 lshift_double (low
, high
, POINTER_SIZE
- GET_MODE_BITSIZE (mode
),
15361 POINTER_SIZE
, &low
, &high
, 0);
15364 low
<<= POINTER_SIZE
- GET_MODE_BITSIZE (mode
);
15365 high
= (HOST_WIDE_INT
) low
>> 32;
15372 if (TARGET_MINIMAL_TOC
)
15373 fputs (DOUBLE_INT_ASM_OP
, file
);
15375 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
15376 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
15377 fprintf (file
, "0x%lx%08lx\n",
15378 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
15383 if (POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
15385 if (TARGET_MINIMAL_TOC
)
15386 fputs ("\t.long ", file
);
15388 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
15389 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
15390 fprintf (file
, "0x%lx,0x%lx\n",
15391 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
15395 if (TARGET_MINIMAL_TOC
)
15396 fputs ("\t.long ", file
);
15398 fprintf (file
, "\t.tc IS_%lx[TC],", (long) low
& 0xffffffff);
15399 fprintf (file
, "0x%lx\n", (long) low
& 0xffffffff);
15405 if (GET_CODE (x
) == CONST
)
15407 gcc_assert (GET_CODE (XEXP (x
, 0)) == PLUS
);
15409 base
= XEXP (XEXP (x
, 0), 0);
15410 offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
15413 switch (GET_CODE (base
))
15416 name
= XSTR (base
, 0);
15420 ASM_GENERATE_INTERNAL_LABEL (buf
, "L",
15421 CODE_LABEL_NUMBER (XEXP (base
, 0)));
15425 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (base
));
15429 gcc_unreachable ();
15432 real_name
= (*targetm
.strip_name_encoding
) (name
);
15433 if (TARGET_MINIMAL_TOC
)
15434 fputs (TARGET_32BIT
? "\t.long " : DOUBLE_INT_ASM_OP
, file
);
15437 fprintf (file
, "\t.tc %s", real_name
);
15440 fprintf (file
, ".N%d", - offset
);
15442 fprintf (file
, ".P%d", offset
);
15444 fputs ("[TC],", file
);
15447 /* Currently C++ toc references to vtables can be emitted before it
15448 is decided whether the vtable is public or private. If this is
15449 the case, then the linker will eventually complain that there is
15450 a TOC reference to an unknown section. Thus, for vtables only,
15451 we emit the TOC reference to reference the symbol and not the
15453 if (VTABLE_NAME_P (name
))
15455 RS6000_OUTPUT_BASENAME (file
, name
);
15457 fprintf (file
, "%d", offset
);
15458 else if (offset
> 0)
15459 fprintf (file
, "+%d", offset
);
15462 output_addr_const (file
, x
);
/* Output an assembler pseudo-op to write an ASCII string of N characters
   starting at P to FILE.

   On the RS/6000, we have to do this using the .byte operation and
   write out special characters outside the quoted string.
   Also, the assembler is broken; very long strings are truncated,
   so we must artificially break them up early.  */

void
output_ascii (FILE *file, const char *p, int n)
{
  char c;
  int i, count_string;
  const char *for_string = "\t.byte \"";
  const char *for_decimal = "\t.byte ";
  const char *to_close = NULL;

  count_string = 0;
  for (i = 0; i < n; i++)
    {
      c = *p++;
      if (c >= ' ' && c < 0177)
        {
          if (for_string)
            fputs (for_string, file);
          putc (c, file);

          /* Write two quotes to get one.  */
          if (c == '"')
            {
              putc (c, file);
              ++count_string;
            }

          for_string = NULL;
          for_decimal = "\"\n\t.byte ";
          to_close = "\"\n";
          ++count_string;

          /* Break overly long quoted runs so the (broken) assembler
             doesn't truncate them.  */
          if (count_string >= 512)
            {
              fputs (to_close, file);

              for_string = "\t.byte \"";
              for_decimal = "\t.byte ";
              to_close = NULL;
              count_string = 0;
            }
        }
      else
        {
          /* Non-printing characters go out as decimal .byte values.  */
          if (for_decimal)
            fputs (for_decimal, file);
          fprintf (file, "%d", c);

          for_string = "\n\t.byte \"";
          for_decimal = ", ";

          to_close = "\n";
          count_string = 0;
        }
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (to_close)
    fputs (to_close, file);
}
/* Generate a unique section name for FILENAME for a section type
   represented by SECTION_DESC.  Output goes into BUF.

   SECTION_DESC can be any string, as long as it is different for each
   possible section type.

   We name the section in the same manner as xlc.  The name begins with an
   underscore followed by the filename (after stripping any leading directory
   names) with the last period replaced by the string SECTION_DESC.  If
   FILENAME does not contain a period, SECTION_DESC is appended to the end of
   the name.  */

void
rs6000_gen_section_name (char **buf, const char *filename,
                         const char *section_desc)
{
  const char *q, *after_last_slash, *last_period = 0;
  char *p;
  int len;

  /* Locate the basename and the final period, if any.  */
  after_last_slash = filename;
  for (q = filename; *q; q++)
    {
      if (*q == '/')
        after_last_slash = q + 1;
      else if (*q == '.')
        last_period = q;
    }

  len = strlen (after_last_slash) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  p = *buf;
  *p++ = '_';

  /* Copy the basename, keeping only alphanumerics, substituting
     SECTION_DESC for the last period.  */
  for (q = after_last_slash; *q; q++)
    {
      if (q == last_period)
        {
          strcpy (p, section_desc);
          p += strlen (section_desc);
          break;
        }

      else if (ISALNUM (*q))
        *p++ = *q;
    }

  /* No period in the name: append SECTION_DESC instead.  */
  if (last_period == 0)
    strcpy (p, section_desc);
  else
    *p = '\0';
}
15587 /* Emit profile function. */
15590 output_profile_hook (int labelno ATTRIBUTE_UNUSED
)
15592 /* Non-standard profiling for kernels, which just saves LR then calls
15593 _mcount without worrying about arg saves. The idea is to change
15594 the function prologue as little as possible as it isn't easy to
15595 account for arg save/restore code added just for _mcount. */
15596 if (TARGET_PROFILE_KERNEL
)
15599 if (DEFAULT_ABI
== ABI_AIX
)
15601 #ifndef NO_PROFILE_COUNTERS
15602 # define NO_PROFILE_COUNTERS 0
15604 if (NO_PROFILE_COUNTERS
)
15605 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 0);
15609 const char *label_name
;
15612 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
15613 label_name
= (*targetm
.strip_name_encoding
) (ggc_strdup (buf
));
15614 fun
= gen_rtx_SYMBOL_REF (Pmode
, label_name
);
15616 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 1,
15620 else if (DEFAULT_ABI
== ABI_DARWIN
)
15622 const char *mcount_name
= RS6000_MCOUNT
;
15623 int caller_addr_regno
= LINK_REGISTER_REGNUM
;
15625 /* Be conservative and always set this, at least for now. */
15626 current_function_uses_pic_offset_table
= 1;
15629 /* For PIC code, set up a stub and collect the caller's address
15630 from r0, which is where the prologue puts it. */
15631 if (MACHOPIC_INDIRECT
15632 && current_function_uses_pic_offset_table
)
15633 caller_addr_regno
= 0;
15635 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, mcount_name
),
15637 gen_rtx_REG (Pmode
, caller_addr_regno
), Pmode
);
15641 /* Write function profiler code. */
15644 output_function_profiler (FILE *file
, int labelno
)
15648 switch (DEFAULT_ABI
)
15651 gcc_unreachable ();
15656 warning (0, "no profiling of 64-bit code for this ABI");
15659 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
15660 fprintf (file
, "\tmflr %s\n", reg_names
[0]);
15661 if (NO_PROFILE_COUNTERS
)
15663 asm_fprintf (file
, "\t{st|stw} %s,4(%s)\n",
15664 reg_names
[0], reg_names
[1]);
15666 else if (TARGET_SECURE_PLT
&& flag_pic
)
15668 asm_fprintf (file
, "\tbcl 20,31,1f\n1:\n\t{st|stw} %s,4(%s)\n",
15669 reg_names
[0], reg_names
[1]);
15670 asm_fprintf (file
, "\tmflr %s\n", reg_names
[12]);
15671 asm_fprintf (file
, "\t{cau|addis} %s,%s,",
15672 reg_names
[12], reg_names
[12]);
15673 assemble_name (file
, buf
);
15674 asm_fprintf (file
, "-1b@ha\n\t{cal|la} %s,", reg_names
[0]);
15675 assemble_name (file
, buf
);
15676 asm_fprintf (file
, "-1b@l(%s)\n", reg_names
[12]);
15678 else if (flag_pic
== 1)
15680 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file
);
15681 asm_fprintf (file
, "\t{st|stw} %s,4(%s)\n",
15682 reg_names
[0], reg_names
[1]);
15683 asm_fprintf (file
, "\tmflr %s\n", reg_names
[12]);
15684 asm_fprintf (file
, "\t{l|lwz} %s,", reg_names
[0]);
15685 assemble_name (file
, buf
);
15686 asm_fprintf (file
, "@got(%s)\n", reg_names
[12]);
15688 else if (flag_pic
> 1)
15690 asm_fprintf (file
, "\t{st|stw} %s,4(%s)\n",
15691 reg_names
[0], reg_names
[1]);
15692 /* Now, we need to get the address of the label. */
15693 fputs ("\tbcl 20,31,1f\n\t.long ", file
);
15694 assemble_name (file
, buf
);
15695 fputs ("-.\n1:", file
);
15696 asm_fprintf (file
, "\tmflr %s\n", reg_names
[11]);
15697 asm_fprintf (file
, "\t{l|lwz} %s,0(%s)\n",
15698 reg_names
[0], reg_names
[11]);
15699 asm_fprintf (file
, "\t{cax|add} %s,%s,%s\n",
15700 reg_names
[0], reg_names
[0], reg_names
[11]);
15704 asm_fprintf (file
, "\t{liu|lis} %s,", reg_names
[12]);
15705 assemble_name (file
, buf
);
15706 fputs ("@ha\n", file
);
15707 asm_fprintf (file
, "\t{st|stw} %s,4(%s)\n",
15708 reg_names
[0], reg_names
[1]);
15709 asm_fprintf (file
, "\t{cal|la} %s,", reg_names
[0]);
15710 assemble_name (file
, buf
);
15711 asm_fprintf (file
, "@l(%s)\n", reg_names
[12]);
15714 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
15715 fprintf (file
, "\tbl %s%s\n",
15716 RS6000_MCOUNT
, flag_pic
? "@plt" : "");
15721 if (!TARGET_PROFILE_KERNEL
)
15723 /* Don't do anything, done in output_profile_hook (). */
15727 gcc_assert (!TARGET_32BIT
);
15729 asm_fprintf (file
, "\tmflr %s\n", reg_names
[0]);
15730 asm_fprintf (file
, "\tstd %s,16(%s)\n", reg_names
[0], reg_names
[1]);
15732 if (cfun
->static_chain_decl
!= NULL
)
15734 asm_fprintf (file
, "\tstd %s,24(%s)\n",
15735 reg_names
[STATIC_CHAIN_REGNUM
], reg_names
[1]);
15736 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
15737 asm_fprintf (file
, "\tld %s,24(%s)\n",
15738 reg_names
[STATIC_CHAIN_REGNUM
], reg_names
[1]);
15741 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
15748 /* Power4 load update and store update instructions are cracked into a
15749 load or store and an integer insn which are executed in the same cycle.
15750 Branches have their own dispatch slot which does not count against the
15751 GCC issue rate, but it changes the program flow so there are no other
15752 instructions to issue in this cycle. */
15755 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED
,
15756 int verbose ATTRIBUTE_UNUSED
,
15757 rtx insn
, int more
)
15759 if (GET_CODE (PATTERN (insn
)) == USE
15760 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
15763 if (rs6000_sched_groups
)
15765 if (is_microcoded_insn (insn
))
15767 else if (is_cracked_insn (insn
))
15768 return more
> 2 ? more
- 2 : 0;
15774 /* Adjust the cost of a scheduling dependency. Return the new cost of
15775 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
15778 rs6000_adjust_cost (rtx insn
, rtx link
, rtx dep_insn
, int cost
)
15780 if (! recog_memoized (insn
))
15783 if (REG_NOTE_KIND (link
) != 0)
15786 if (REG_NOTE_KIND (link
) == 0)
15788 /* Data dependency; DEP_INSN writes a register that INSN reads
15789 some cycles later. */
15791 /* Separate a load from a narrower, dependent store. */
15792 if (rs6000_sched_groups
15793 && GET_CODE (PATTERN (insn
)) == SET
15794 && GET_CODE (PATTERN (dep_insn
)) == SET
15795 && GET_CODE (XEXP (PATTERN (insn
), 1)) == MEM
15796 && GET_CODE (XEXP (PATTERN (dep_insn
), 0)) == MEM
15797 && (GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (insn
), 1)))
15798 > GET_MODE_SIZE (GET_MODE (XEXP (PATTERN (dep_insn
), 0)))))
15801 switch (get_attr_type (insn
))
15804 /* Tell the first scheduling pass about the latency between
15805 a mtctr and bctr (and mtlr and br/blr). The first
15806 scheduling pass will not know about this latency since
15807 the mtctr instruction, which has the latency associated
15808 to it, will be generated by reload. */
15809 return TARGET_POWER
? 5 : 4;
15811 /* Leave some extra cycles between a compare and its
15812 dependent branch, to inhibit expensive mispredicts. */
15813 if ((rs6000_cpu_attr
== CPU_PPC603
15814 || rs6000_cpu_attr
== CPU_PPC604
15815 || rs6000_cpu_attr
== CPU_PPC604E
15816 || rs6000_cpu_attr
== CPU_PPC620
15817 || rs6000_cpu_attr
== CPU_PPC630
15818 || rs6000_cpu_attr
== CPU_PPC750
15819 || rs6000_cpu_attr
== CPU_PPC7400
15820 || rs6000_cpu_attr
== CPU_PPC7450
15821 || rs6000_cpu_attr
== CPU_POWER4
15822 || rs6000_cpu_attr
== CPU_POWER5
)
15823 && recog_memoized (dep_insn
)
15824 && (INSN_CODE (dep_insn
) >= 0)
15825 && (get_attr_type (dep_insn
) == TYPE_CMP
15826 || get_attr_type (dep_insn
) == TYPE_COMPARE
15827 || get_attr_type (dep_insn
) == TYPE_DELAYED_COMPARE
15828 || get_attr_type (dep_insn
) == TYPE_IMUL_COMPARE
15829 || get_attr_type (dep_insn
) == TYPE_LMUL_COMPARE
15830 || get_attr_type (dep_insn
) == TYPE_FPCOMPARE
15831 || get_attr_type (dep_insn
) == TYPE_CR_LOGICAL
15832 || get_attr_type (dep_insn
) == TYPE_DELAYED_CR
))
15837 /* Fall out to return default cost. */
15843 /* The function returns a true if INSN is microcoded.
15844 Return false otherwise. */
15847 is_microcoded_insn (rtx insn
)
15849 if (!insn
|| !INSN_P (insn
)
15850 || GET_CODE (PATTERN (insn
)) == USE
15851 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
15854 if (rs6000_sched_groups
)
15856 enum attr_type type
= get_attr_type (insn
);
15857 if (type
== TYPE_LOAD_EXT_U
15858 || type
== TYPE_LOAD_EXT_UX
15859 || type
== TYPE_LOAD_UX
15860 || type
== TYPE_STORE_UX
15861 || type
== TYPE_MFCR
)
15868 /* The function returns a nonzero value if INSN can be scheduled only
15869 as the first insn in a dispatch group ("dispatch-slot restricted").
15870 In this case, the returned value indicates how many dispatch slots
15871 the insn occupies (at the beginning of the group).
15872 Return 0 otherwise. */
15875 is_dispatch_slot_restricted (rtx insn
)
15877 enum attr_type type
;
15879 if (!rs6000_sched_groups
)
15883 || insn
== NULL_RTX
15884 || GET_CODE (insn
) == NOTE
15885 || GET_CODE (PATTERN (insn
)) == USE
15886 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
15889 type
= get_attr_type (insn
);
15896 case TYPE_DELAYED_CR
:
15897 case TYPE_CR_LOGICAL
:
15910 if (rs6000_cpu
== PROCESSOR_POWER5
15911 && is_cracked_insn (insn
))
15917 /* The function returns true if INSN is cracked into 2 instructions
15918 by the processor (and therefore occupies 2 issue slots). */
15921 is_cracked_insn (rtx insn
)
15923 if (!insn
|| !INSN_P (insn
)
15924 || GET_CODE (PATTERN (insn
)) == USE
15925 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
15928 if (rs6000_sched_groups
)
15930 enum attr_type type
= get_attr_type (insn
);
15931 if (type
== TYPE_LOAD_U
|| type
== TYPE_STORE_U
15932 || type
== TYPE_FPLOAD_U
|| type
== TYPE_FPSTORE_U
15933 || type
== TYPE_FPLOAD_UX
|| type
== TYPE_FPSTORE_UX
15934 || type
== TYPE_LOAD_EXT
|| type
== TYPE_DELAYED_CR
15935 || type
== TYPE_COMPARE
|| type
== TYPE_DELAYED_COMPARE
15936 || type
== TYPE_IMUL_COMPARE
|| type
== TYPE_LMUL_COMPARE
15937 || type
== TYPE_IDIV
|| type
== TYPE_LDIV
15938 || type
== TYPE_INSERT_WORD
)
15945 /* The function returns true if INSN can be issued only from
15946 the branch slot. */
15949 is_branch_slot_insn (rtx insn
)
15951 if (!insn
|| !INSN_P (insn
)
15952 || GET_CODE (PATTERN (insn
)) == USE
15953 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
15956 if (rs6000_sched_groups
)
15958 enum attr_type type
= get_attr_type (insn
);
15959 if (type
== TYPE_BRANCH
|| type
== TYPE_JMPREG
)
15967 /* A C statement (sans semicolon) to update the integer scheduling
15968 priority INSN_PRIORITY (INSN). Increase the priority to execute the
15969 INSN earlier, reduce the priority to execute INSN later. Do not
15970 define this macro if you do not need to adjust the scheduling
15971 priorities of insns. */
15974 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED
, int priority
)
15976 /* On machines (like the 750) which have asymmetric integer units,
15977 where one integer unit can do multiply and divides and the other
15978 can't, reduce the priority of multiply/divide so it is scheduled
15979 before other integer operations. */
15982 if (! INSN_P (insn
))
15985 if (GET_CODE (PATTERN (insn
)) == USE
)
15988 switch (rs6000_cpu_attr
) {
15990 switch (get_attr_type (insn
))
15997 fprintf (stderr
, "priority was %#x (%d) before adjustment\n",
15998 priority
, priority
);
15999 if (priority
>= 0 && priority
< 0x01000000)
16006 if (is_dispatch_slot_restricted (insn
)
16007 && reload_completed
16008 && current_sched_info
->sched_max_insns_priority
16009 && rs6000_sched_restricted_insns_priority
)
16012 /* Prioritize insns that can be dispatched only in the first
16014 if (rs6000_sched_restricted_insns_priority
== 1)
16015 /* Attach highest priority to insn. This means that in
16016 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
16017 precede 'priority' (critical path) considerations. */
16018 return current_sched_info
->sched_max_insns_priority
;
16019 else if (rs6000_sched_restricted_insns_priority
== 2)
16020 /* Increase priority of insn by a minimal amount. This means that in
16021 haifa-sched.c:ready_sort(), only 'priority' (critical path)
16022 considerations precede dispatch-slot restriction considerations. */
16023 return (priority
+ 1);
16029 /* Return how many instructions the machine can issue per cycle. */
16032 rs6000_issue_rate (void)
16034 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
16035 if (!reload_completed
)
16038 switch (rs6000_cpu_attr
) {
16039 case CPU_RIOS1
: /* ? */
16041 case CPU_PPC601
: /* ? */
16064 /* Return how many instructions to look ahead for better insn
16068 rs6000_use_sched_lookahead (void)
16070 if (rs6000_cpu_attr
== CPU_PPC8540
)
16075 /* Determine is PAT refers to memory. */
16078 is_mem_ref (rtx pat
)
16084 if (GET_CODE (pat
) == MEM
)
16087 /* Recursively process the pattern. */
16088 fmt
= GET_RTX_FORMAT (GET_CODE (pat
));
16090 for (i
= GET_RTX_LENGTH (GET_CODE (pat
)) - 1; i
>= 0 && !ret
; i
--)
16093 ret
|= is_mem_ref (XEXP (pat
, i
));
16094 else if (fmt
[i
] == 'E')
16095 for (j
= XVECLEN (pat
, i
) - 1; j
>= 0; j
--)
16096 ret
|= is_mem_ref (XVECEXP (pat
, i
, j
));
16102 /* Determine if PAT is a PATTERN of a load insn. */
16105 is_load_insn1 (rtx pat
)
16107 if (!pat
|| pat
== NULL_RTX
)
16110 if (GET_CODE (pat
) == SET
)
16111 return is_mem_ref (SET_SRC (pat
));
16113 if (GET_CODE (pat
) == PARALLEL
)
16117 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
16118 if (is_load_insn1 (XVECEXP (pat
, 0, i
)))
16125 /* Determine if INSN loads from memory. */
16128 is_load_insn (rtx insn
)
16130 if (!insn
|| !INSN_P (insn
))
16133 if (GET_CODE (insn
) == CALL_INSN
)
16136 return is_load_insn1 (PATTERN (insn
));
16139 /* Determine if PAT is a PATTERN of a store insn. */
16142 is_store_insn1 (rtx pat
)
16144 if (!pat
|| pat
== NULL_RTX
)
16147 if (GET_CODE (pat
) == SET
)
16148 return is_mem_ref (SET_DEST (pat
));
16150 if (GET_CODE (pat
) == PARALLEL
)
16154 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
16155 if (is_store_insn1 (XVECEXP (pat
, 0, i
)))
16162 /* Determine if INSN stores to memory. */
16165 is_store_insn (rtx insn
)
16167 if (!insn
|| !INSN_P (insn
))
16170 return is_store_insn1 (PATTERN (insn
));
16173 /* Returns whether the dependence between INSN and NEXT is considered
16174 costly by the given target. */
16177 rs6000_is_costly_dependence (rtx insn
, rtx next
, rtx link
, int cost
,
16180 /* If the flag is not enabled - no dependence is considered costly;
16181 allow all dependent insns in the same group.
16182 This is the most aggressive option. */
16183 if (rs6000_sched_costly_dep
== no_dep_costly
)
16186 /* If the flag is set to 1 - a dependence is always considered costly;
16187 do not allow dependent instructions in the same group.
16188 This is the most conservative option. */
16189 if (rs6000_sched_costly_dep
== all_deps_costly
)
16192 if (rs6000_sched_costly_dep
== store_to_load_dep_costly
16193 && is_load_insn (next
)
16194 && is_store_insn (insn
))
16195 /* Prevent load after store in the same group. */
16198 if (rs6000_sched_costly_dep
== true_store_to_load_dep_costly
16199 && is_load_insn (next
)
16200 && is_store_insn (insn
)
16201 && (!link
|| (int) REG_NOTE_KIND (link
) == 0))
16202 /* Prevent load after store in the same group if it is a true
16206 /* The flag is set to X; dependences with latency >= X are considered costly,
16207 and will not be scheduled in the same group. */
16208 if (rs6000_sched_costly_dep
<= max_dep_latency
16209 && ((cost
- distance
) >= (int)rs6000_sched_costly_dep
))
16215 /* Return the next insn after INSN that is found before TAIL is reached,
16216 skipping any "non-active" insns - insns that will not actually occupy
16217 an issue slot. Return NULL_RTX if such an insn is not found. */
16220 get_next_active_insn (rtx insn
, rtx tail
)
16224 if (!insn
|| insn
== tail
)
16227 next_insn
= NEXT_INSN (insn
);
16230 && next_insn
!= tail
16231 && (GET_CODE (next_insn
) == NOTE
16232 || GET_CODE (PATTERN (next_insn
)) == USE
16233 || GET_CODE (PATTERN (next_insn
)) == CLOBBER
))
16235 next_insn
= NEXT_INSN (next_insn
);
16238 if (!next_insn
|| next_insn
== tail
)
16244 /* Return whether the presence of INSN causes a dispatch group termination
16245 of group WHICH_GROUP.
16247 If WHICH_GROUP == current_group, this function will return true if INSN
16248 causes the termination of the current group (i.e, the dispatch group to
16249 which INSN belongs). This means that INSN will be the last insn in the
16250 group it belongs to.
16252 If WHICH_GROUP == previous_group, this function will return true if INSN
16253 causes the termination of the previous group (i.e, the dispatch group that
16254 precedes the group to which INSN belongs). This means that INSN will be
16255 the first insn in the group it belongs to). */
16258 insn_terminates_group_p (rtx insn
, enum group_termination which_group
)
16260 enum attr_type type
;
16265 type
= get_attr_type (insn
);
16267 if (is_microcoded_insn (insn
))
16270 if (which_group
== current_group
)
16272 if (is_branch_slot_insn (insn
))
16276 else if (which_group
== previous_group
)
16278 if (is_dispatch_slot_restricted (insn
))
16286 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
16287 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
16290 is_costly_group (rtx
*group_insns
, rtx next_insn
)
16295 int issue_rate
= rs6000_issue_rate ();
16297 for (i
= 0; i
< issue_rate
; i
++)
16299 rtx insn
= group_insns
[i
];
16302 for (link
= INSN_DEPEND (insn
); link
!= 0; link
= XEXP (link
, 1))
16304 rtx next
= XEXP (link
, 0);
16305 if (next
== next_insn
)
16307 cost
= insn_cost (insn
, link
, next_insn
);
16308 if (rs6000_is_costly_dependence (insn
, next_insn
, link
, cost
, 0))
16317 /* Utility of the function redefine_groups.
16318 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
16319 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
16320 to keep it "far" (in a separate group) from GROUP_INSNS, following
16321 one of the following schemes, depending on the value of the flag
16322 -minsert_sched_nops = X:
16323 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
16324 in order to force NEXT_INSN into a separate group.
16325 (2) X < sched_finish_regroup_exact: insert exactly X nops.
16326 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
16327 insertion (has a group just ended, how many vacant issue slots remain in the
16328 last group, and how many dispatch groups were encountered so far). */
16331 force_new_group (int sched_verbose
, FILE *dump
, rtx
*group_insns
,
16332 rtx next_insn
, bool *group_end
, int can_issue_more
,
16337 int issue_rate
= rs6000_issue_rate ();
16338 bool end
= *group_end
;
16341 if (next_insn
== NULL_RTX
)
16342 return can_issue_more
;
16344 if (rs6000_sched_insert_nops
> sched_finish_regroup_exact
)
16345 return can_issue_more
;
16347 force
= is_costly_group (group_insns
, next_insn
);
16349 return can_issue_more
;
16351 if (sched_verbose
> 6)
16352 fprintf (dump
,"force: group count = %d, can_issue_more = %d\n",
16353 *group_count
,can_issue_more
);
16355 if (rs6000_sched_insert_nops
== sched_finish_regroup_exact
)
16358 can_issue_more
= 0;
16360 /* Since only a branch can be issued in the last issue_slot, it is
16361 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
16362 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
16363 in this case the last nop will start a new group and the branch
16364 will be forced to the new group. */
16365 if (can_issue_more
&& !is_branch_slot_insn (next_insn
))
16368 while (can_issue_more
> 0)
16371 emit_insn_before (nop
, next_insn
);
16379 if (rs6000_sched_insert_nops
< sched_finish_regroup_exact
)
16381 int n_nops
= rs6000_sched_insert_nops
;
16383 /* Nops can't be issued from the branch slot, so the effective
16384 issue_rate for nops is 'issue_rate - 1'. */
16385 if (can_issue_more
== 0)
16386 can_issue_more
= issue_rate
;
16388 if (can_issue_more
== 0)
16390 can_issue_more
= issue_rate
- 1;
16393 for (i
= 0; i
< issue_rate
; i
++)
16395 group_insns
[i
] = 0;
16402 emit_insn_before (nop
, next_insn
);
16403 if (can_issue_more
== issue_rate
- 1) /* new group begins */
16406 if (can_issue_more
== 0)
16408 can_issue_more
= issue_rate
- 1;
16411 for (i
= 0; i
< issue_rate
; i
++)
16413 group_insns
[i
] = 0;
16419 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
16422 /* Is next_insn going to start a new group? */
16425 || (can_issue_more
== 1 && !is_branch_slot_insn (next_insn
))
16426 || (can_issue_more
<= 2 && is_cracked_insn (next_insn
))
16427 || (can_issue_more
< issue_rate
&&
16428 insn_terminates_group_p (next_insn
, previous_group
)));
16429 if (*group_end
&& end
)
16432 if (sched_verbose
> 6)
16433 fprintf (dump
, "done force: group count = %d, can_issue_more = %d\n",
16434 *group_count
, can_issue_more
);
16435 return can_issue_more
;
16438 return can_issue_more
;
16441 /* This function tries to synch the dispatch groups that the compiler "sees"
16442 with the dispatch groups that the processor dispatcher is expected to
16443 form in practice. It tries to achieve this synchronization by forcing the
16444 estimated processor grouping on the compiler (as opposed to the function
16445 'pad_goups' which tries to force the scheduler's grouping on the processor).
16447 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
16448 examines the (estimated) dispatch groups that will be formed by the processor
16449 dispatcher. It marks these group boundaries to reflect the estimated
16450 processor grouping, overriding the grouping that the scheduler had marked.
16451 Depending on the value of the flag '-minsert-sched-nops' this function can
16452 force certain insns into separate groups or force a certain distance between
16453 them by inserting nops, for example, if there exists a "costly dependence"
16456 The function estimates the group boundaries that the processor will form as
16457 follows: It keeps track of how many vacant issue slots are available after
16458 each insn. A subsequent insn will start a new group if one of the following
16460 - no more vacant issue slots remain in the current dispatch group.
16461 - only the last issue slot, which is the branch slot, is vacant, but the next
16462 insn is not a branch.
16463 - only the last 2 or less issue slots, including the branch slot, are vacant,
16464 which means that a cracked insn (which occupies two issue slots) can't be
16465 issued in this group.
16466 - less than 'issue_rate' slots are vacant, and the next insn always needs to
16467 start a new group. */
16470 redefine_groups (FILE *dump
, int sched_verbose
, rtx prev_head_insn
, rtx tail
)
16472 rtx insn
, next_insn
;
16474 int can_issue_more
;
16477 int group_count
= 0;
16481 issue_rate
= rs6000_issue_rate ();
16482 group_insns
= alloca (issue_rate
* sizeof (rtx
));
16483 for (i
= 0; i
< issue_rate
; i
++)
16485 group_insns
[i
] = 0;
16487 can_issue_more
= issue_rate
;
16489 insn
= get_next_active_insn (prev_head_insn
, tail
);
16492 while (insn
!= NULL_RTX
)
16494 slot
= (issue_rate
- can_issue_more
);
16495 group_insns
[slot
] = insn
;
16497 rs6000_variable_issue (dump
, sched_verbose
, insn
, can_issue_more
);
16498 if (insn_terminates_group_p (insn
, current_group
))
16499 can_issue_more
= 0;
16501 next_insn
= get_next_active_insn (insn
, tail
);
16502 if (next_insn
== NULL_RTX
)
16503 return group_count
+ 1;
16505 /* Is next_insn going to start a new group? */
16507 = (can_issue_more
== 0
16508 || (can_issue_more
== 1 && !is_branch_slot_insn (next_insn
))
16509 || (can_issue_more
<= 2 && is_cracked_insn (next_insn
))
16510 || (can_issue_more
< issue_rate
&&
16511 insn_terminates_group_p (next_insn
, previous_group
)));
16513 can_issue_more
= force_new_group (sched_verbose
, dump
, group_insns
,
16514 next_insn
, &group_end
, can_issue_more
,
16520 can_issue_more
= 0;
16521 for (i
= 0; i
< issue_rate
; i
++)
16523 group_insns
[i
] = 0;
16527 if (GET_MODE (next_insn
) == TImode
&& can_issue_more
)
16528 PUT_MODE (next_insn
, VOIDmode
);
16529 else if (!can_issue_more
&& GET_MODE (next_insn
) != TImode
)
16530 PUT_MODE (next_insn
, TImode
);
16533 if (can_issue_more
== 0)
16534 can_issue_more
= issue_rate
;
16537 return group_count
;
16540 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
16541 dispatch group boundaries that the scheduler had marked. Pad with nops
16542 any dispatch groups which have vacant issue slots, in order to force the
16543 scheduler's grouping on the processor dispatcher. The function
16544 returns the number of dispatch groups found. */
16547 pad_groups (FILE *dump
, int sched_verbose
, rtx prev_head_insn
, rtx tail
)
16549 rtx insn
, next_insn
;
16552 int can_issue_more
;
16554 int group_count
= 0;
16556 /* Initialize issue_rate. */
16557 issue_rate
= rs6000_issue_rate ();
16558 can_issue_more
= issue_rate
;
16560 insn
= get_next_active_insn (prev_head_insn
, tail
);
16561 next_insn
= get_next_active_insn (insn
, tail
);
16563 while (insn
!= NULL_RTX
)
16566 rs6000_variable_issue (dump
, sched_verbose
, insn
, can_issue_more
);
16568 group_end
= (next_insn
== NULL_RTX
|| GET_MODE (next_insn
) == TImode
);
16570 if (next_insn
== NULL_RTX
)
16575 /* If the scheduler had marked group termination at this location
16576 (between insn and next_indn), and neither insn nor next_insn will
16577 force group termination, pad the group with nops to force group
16580 && (rs6000_sched_insert_nops
== sched_finish_pad_groups
)
16581 && !insn_terminates_group_p (insn
, current_group
)
16582 && !insn_terminates_group_p (next_insn
, previous_group
))
16584 if (!is_branch_slot_insn (next_insn
))
16587 while (can_issue_more
)
16590 emit_insn_before (nop
, next_insn
);
16595 can_issue_more
= issue_rate
;
16600 next_insn
= get_next_active_insn (insn
, tail
);
16603 return group_count
;
16606 /* The following function is called at the end of scheduling BB.
16607 After reload, it inserts nops at insn group bundling. */
16610 rs6000_sched_finish (FILE *dump
, int sched_verbose
)
16615 fprintf (dump
, "=== Finishing schedule.\n");
16617 if (reload_completed
&& rs6000_sched_groups
)
16619 if (rs6000_sched_insert_nops
== sched_finish_none
)
16622 if (rs6000_sched_insert_nops
== sched_finish_pad_groups
)
16623 n_groups
= pad_groups (dump
, sched_verbose
,
16624 current_sched_info
->prev_head
,
16625 current_sched_info
->next_tail
);
16627 n_groups
= redefine_groups (dump
, sched_verbose
,
16628 current_sched_info
->prev_head
,
16629 current_sched_info
->next_tail
);
16631 if (sched_verbose
>= 6)
16633 fprintf (dump
, "ngroups = %d\n", n_groups
);
16634 print_rtl (dump
, current_sched_info
->prev_head
);
16635 fprintf (dump
, "Done finish_sched\n");
16640 /* Length in units of the trampoline for entering a nested function. */
16643 rs6000_trampoline_size (void)
16647 switch (DEFAULT_ABI
)
16650 gcc_unreachable ();
16653 ret
= (TARGET_32BIT
) ? 12 : 24;
16658 ret
= (TARGET_32BIT
) ? 40 : 48;
16665 /* Emit RTL insns to initialize the variable parts of a trampoline.
16666 FNADDR is an RTX for the address of the function's pure code.
16667 CXT is an RTX for the static chain value for the function. */
16670 rs6000_initialize_trampoline (rtx addr
, rtx fnaddr
, rtx cxt
)
16672 enum machine_mode pmode
= Pmode
;
16673 int regsize
= (TARGET_32BIT
) ? 4 : 8;
16674 rtx ctx_reg
= force_reg (pmode
, cxt
);
16676 switch (DEFAULT_ABI
)
16679 gcc_unreachable ();
16681 /* Macros to shorten the code expansions below. */
16682 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
16683 #define MEM_PLUS(addr,offset) \
16684 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
16686 /* Under AIX, just build the 3 word function descriptor */
16689 rtx fn_reg
= gen_reg_rtx (pmode
);
16690 rtx toc_reg
= gen_reg_rtx (pmode
);
16691 emit_move_insn (fn_reg
, MEM_DEREF (fnaddr
));
16692 emit_move_insn (toc_reg
, MEM_PLUS (fnaddr
, regsize
));
16693 emit_move_insn (MEM_DEREF (addr
), fn_reg
);
16694 emit_move_insn (MEM_PLUS (addr
, regsize
), toc_reg
);
16695 emit_move_insn (MEM_PLUS (addr
, 2*regsize
), ctx_reg
);
16699 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
16702 emit_library_call (gen_rtx_SYMBOL_REF (SImode
, "__trampoline_setup"),
16703 FALSE
, VOIDmode
, 4,
16705 GEN_INT (rs6000_trampoline_size ()), SImode
,
16715 /* Table of valid machine attributes. */
16717 const struct attribute_spec rs6000_attribute_table
[] =
16719 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
16720 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute
},
16721 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
16722 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
16723 #ifdef SUBTARGET_ATTRIBUTE_TABLE
16724 SUBTARGET_ATTRIBUTE_TABLE
,
16726 { NULL
, 0, 0, false, false, false, NULL
}
16729 /* Handle the "altivec" attribute. The attribute may have
16730 arguments as follows:
16732 __attribute__((altivec(vector__)))
16733 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
16734 __attribute__((altivec(bool__))) (always followed by 'unsigned')
16736 and may appear more than once (e.g., 'vector bool char') in a
16737 given declaration. */
16740 rs6000_handle_altivec_attribute (tree
*node
,
16741 tree name ATTRIBUTE_UNUSED
,
16743 int flags ATTRIBUTE_UNUSED
,
16744 bool *no_add_attrs
)
16746 tree type
= *node
, result
= NULL_TREE
;
16747 enum machine_mode mode
;
16750 = ((args
&& TREE_CODE (args
) == TREE_LIST
&& TREE_VALUE (args
)
16751 && TREE_CODE (TREE_VALUE (args
)) == IDENTIFIER_NODE
)
16752 ? *IDENTIFIER_POINTER (TREE_VALUE (args
))
16755 while (POINTER_TYPE_P (type
)
16756 || TREE_CODE (type
) == FUNCTION_TYPE
16757 || TREE_CODE (type
) == METHOD_TYPE
16758 || TREE_CODE (type
) == ARRAY_TYPE
)
16759 type
= TREE_TYPE (type
);
16761 mode
= TYPE_MODE (type
);
16763 /* Check for invalid AltiVec type qualifiers. */
16764 if (type
== long_unsigned_type_node
|| type
== long_integer_type_node
)
16767 error ("use of %<long%> in AltiVec types is invalid for 64-bit code");
16768 else if (rs6000_warn_altivec_long
)
16769 warning (0, "use of %<long%> in AltiVec types is deprecated; use %<int%>");
16771 else if (type
== long_long_unsigned_type_node
16772 || type
== long_long_integer_type_node
)
16773 error ("use of %<long long%> in AltiVec types is invalid");
16774 else if (type
== double_type_node
)
16775 error ("use of %<double%> in AltiVec types is invalid");
16776 else if (type
== long_double_type_node
)
16777 error ("use of %<long double%> in AltiVec types is invalid");
16778 else if (type
== boolean_type_node
)
16779 error ("use of boolean types in AltiVec types is invalid");
16780 else if (TREE_CODE (type
) == COMPLEX_TYPE
)
16781 error ("use of %<complex%> in AltiVec types is invalid");
16783 switch (altivec_type
)
16786 unsigned_p
= TYPE_UNSIGNED (type
);
16790 result
= (unsigned_p
? unsigned_V4SI_type_node
: V4SI_type_node
);
16793 result
= (unsigned_p
? unsigned_V8HI_type_node
: V8HI_type_node
);
16796 result
= (unsigned_p
? unsigned_V16QI_type_node
: V16QI_type_node
);
16798 case SFmode
: result
= V4SF_type_node
; break;
16799 /* If the user says 'vector int bool', we may be handed the 'bool'
16800 attribute _before_ the 'vector' attribute, and so select the
16801 proper type in the 'b' case below. */
16802 case V4SImode
: case V8HImode
: case V16QImode
: case V4SFmode
:
16810 case SImode
: case V4SImode
: result
= bool_V4SI_type_node
; break;
16811 case HImode
: case V8HImode
: result
= bool_V8HI_type_node
; break;
16812 case QImode
: case V16QImode
: result
= bool_V16QI_type_node
;
16819 case V8HImode
: result
= pixel_V8HI_type_node
;
16825 if (result
&& result
!= type
&& TYPE_READONLY (type
))
16826 result
= build_qualified_type (result
, TYPE_QUAL_CONST
);
16828 *no_add_attrs
= true; /* No need to hang on to the attribute. */
16831 *node
= reconstruct_complex_type (*node
, result
);
16836 /* AltiVec defines four built-in scalar types that serve as vector
16837 elements; we must teach the compiler how to mangle them. */
16839 static const char *
16840 rs6000_mangle_fundamental_type (tree type
)
16842 if (type
== bool_char_type_node
) return "U6__boolc";
16843 if (type
== bool_short_type_node
) return "U6__bools";
16844 if (type
== pixel_type_node
) return "u7__pixel";
16845 if (type
== bool_int_type_node
) return "U6__booli";
16847 /* For all other types, use normal C++ mangling. */
16851 /* Handle a "longcall" or "shortcall" attribute; arguments as in
16852 struct attribute_spec.handler. */
16855 rs6000_handle_longcall_attribute (tree
*node
, tree name
,
16856 tree args ATTRIBUTE_UNUSED
,
16857 int flags ATTRIBUTE_UNUSED
,
16858 bool *no_add_attrs
)
16860 if (TREE_CODE (*node
) != FUNCTION_TYPE
16861 && TREE_CODE (*node
) != FIELD_DECL
16862 && TREE_CODE (*node
) != TYPE_DECL
)
16864 warning (OPT_Wattributes
, "%qs attribute only applies to functions",
16865 IDENTIFIER_POINTER (name
));
16866 *no_add_attrs
= true;
16872 /* Set longcall attributes on all functions declared when
16873 rs6000_default_long_calls is true. */
16875 rs6000_set_default_type_attributes (tree type
)
16877 if (rs6000_default_long_calls
16878 && (TREE_CODE (type
) == FUNCTION_TYPE
16879 || TREE_CODE (type
) == METHOD_TYPE
))
16880 TYPE_ATTRIBUTES (type
) = tree_cons (get_identifier ("longcall"),
16882 TYPE_ATTRIBUTES (type
));
16885 /* Return a reference suitable for calling a function with the
16886 longcall attribute. */
16889 rs6000_longcall_ref (rtx call_ref
)
16891 const char *call_name
;
16894 if (GET_CODE (call_ref
) != SYMBOL_REF
)
16897 /* System V adds '.' to the internal name, so skip them. */
16898 call_name
= XSTR (call_ref
, 0);
16899 if (*call_name
== '.')
16901 while (*call_name
== '.')
16904 node
= get_identifier (call_name
);
16905 call_ref
= gen_rtx_SYMBOL_REF (VOIDmode
, IDENTIFIER_POINTER (node
));
16908 return force_reg (Pmode
, call_ref
);
16911 #ifdef USING_ELFOS_H
16913 /* A C statement or statements to switch to the appropriate section
16914 for output of RTX in mode MODE. You can assume that RTX is some
16915 kind of constant in RTL. The argument MODE is redundant except in
16916 the case of a `const_int' rtx. Select the section by calling
16917 `text_section' or one of the alternatives for other sections.
16919 Do not define this macro if you put all constants in the read-only
16923 rs6000_elf_select_rtx_section (enum machine_mode mode
, rtx x
,
16924 unsigned HOST_WIDE_INT align
)
16926 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
16929 default_elf_select_rtx_section (mode
, x
, align
);
16932 /* A C statement or statements to switch to the appropriate
16933 section for output of DECL. DECL is either a `VAR_DECL' node
16934 or a constant of some sort. RELOC indicates whether forming
16935 the initial value of DECL requires link-time relocations. */
16938 rs6000_elf_select_section (tree decl
, int reloc
,
16939 unsigned HOST_WIDE_INT align
)
16941 /* Pretend that we're always building for a shared library when
16942 ABI_AIX, because otherwise we end up with dynamic relocations
16943 in read-only sections. This happens for function pointers,
16944 references to vtables in typeinfo, and probably other cases. */
16945 default_elf_select_section_1 (decl
, reloc
, align
,
16946 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
16949 /* A C statement to build up a unique section name, expressed as a
16950 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
16951 RELOC indicates whether the initial value of EXP requires
16952 link-time relocations. If you do not define this macro, GCC will use
16953 the symbol name prefixed by `.' as the section name. Note - this
16954 macro can now be called for uninitialized data items as well as
16955 initialized data and functions. */
16958 rs6000_elf_unique_section (tree decl
, int reloc
)
16960 /* As above, pretend that we're always building for a shared library
16961 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
16962 default_unique_section_1 (decl
, reloc
,
16963 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
16966 /* For a SYMBOL_REF, set generic flags and then perform some
16967 target-specific processing.
16969 When the AIX ABI is requested on a non-AIX system, replace the
16970 function name with the real name (with a leading .) rather than the
16971 function descriptor name. This saves a lot of overriding code to
16972 read the prefixes. */
16975 rs6000_elf_encode_section_info (tree decl
, rtx rtl
, int first
)
16977 default_encode_section_info (decl
, rtl
, first
);
16980 && TREE_CODE (decl
) == FUNCTION_DECL
16982 && DEFAULT_ABI
== ABI_AIX
)
16984 rtx sym_ref
= XEXP (rtl
, 0);
16985 size_t len
= strlen (XSTR (sym_ref
, 0));
16986 char *str
= alloca (len
+ 2);
16988 memcpy (str
+ 1, XSTR (sym_ref
, 0), len
+ 1);
16989 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len
+ 1);
16994 rs6000_elf_in_small_data_p (tree decl
)
16996 if (rs6000_sdata
== SDATA_NONE
)
16999 /* We want to merge strings, so we never consider them small data. */
17000 if (TREE_CODE (decl
) == STRING_CST
)
17003 /* Functions are never in the small data area. */
17004 if (TREE_CODE (decl
) == FUNCTION_DECL
)
17007 if (TREE_CODE (decl
) == VAR_DECL
&& DECL_SECTION_NAME (decl
))
17009 const char *section
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
17010 if (strcmp (section
, ".sdata") == 0
17011 || strcmp (section
, ".sdata2") == 0
17012 || strcmp (section
, ".sbss") == 0
17013 || strcmp (section
, ".sbss2") == 0
17014 || strcmp (section
, ".PPC.EMB.sdata0") == 0
17015 || strcmp (section
, ".PPC.EMB.sbss0") == 0)
17020 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
17023 && (unsigned HOST_WIDE_INT
) size
<= g_switch_value
17024 /* If it's not public, and we're not going to reference it there,
17025 there's no need to put it in the small data section. */
17026 && (rs6000_sdata
!= SDATA_DATA
|| TREE_PUBLIC (decl
)))
17033 #endif /* USING_ELFOS_H */
17036 /* Return a REG that occurs in ADDR with coefficient 1.
17037 ADDR can be effectively incremented by incrementing REG.
17039 r0 is special and we must not select it as an address
17040 register by this routine since our caller will try to
17041 increment the returned register via an "la" instruction. */
17044 find_addr_reg (rtx addr
)
17046 while (GET_CODE (addr
) == PLUS
)
17048 if (GET_CODE (XEXP (addr
, 0)) == REG
17049 && REGNO (XEXP (addr
, 0)) != 0)
17050 addr
= XEXP (addr
, 0);
17051 else if (GET_CODE (XEXP (addr
, 1)) == REG
17052 && REGNO (XEXP (addr
, 1)) != 0)
17053 addr
= XEXP (addr
, 1);
17054 else if (CONSTANT_P (XEXP (addr
, 0)))
17055 addr
= XEXP (addr
, 1);
17056 else if (CONSTANT_P (XEXP (addr
, 1)))
17057 addr
= XEXP (addr
, 0);
17059 gcc_unreachable ();
17061 gcc_assert (GET_CODE (addr
) == REG
&& REGNO (addr
) != 0);
17066 rs6000_fatal_bad_address (rtx op
)
17068 fatal_insn ("bad address", op
);
17073 static tree branch_island_list
= 0;
17075 /* Remember to generate a branch island for far calls to the given
17079 add_compiler_branch_island (tree label_name
, tree function_name
,
17082 tree branch_island
= build_tree_list (function_name
, label_name
);
17083 TREE_TYPE (branch_island
) = build_int_cst (NULL_TREE
, line_number
);
17084 TREE_CHAIN (branch_island
) = branch_island_list
;
17085 branch_island_list
= branch_island
;
/* Field accessors for a branch-island TREE_LIST node.  */
#define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND)     TREE_VALUE (BRANCH_ISLAND)
#define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND)  TREE_PURPOSE (BRANCH_ISLAND)
#define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND)    \
  TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
17093 /* Generate far-jump branch islands for everything on the
17094 branch_island_list. Invoked immediately after the last instruction
17095 of the epilogue has been emitted; the branch-islands must be
17096 appended to, and contiguous with, the function body. Mach-O stubs
17097 are generated in machopic_output_stub(). */
17100 macho_branch_islands (void)
17103 tree branch_island
;
17105 for (branch_island
= branch_island_list
;
17107 branch_island
= TREE_CHAIN (branch_island
))
17109 const char *label
=
17110 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island
));
17112 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island
));
17113 char name_buf
[512];
17114 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
17115 if (name
[0] == '*' || name
[0] == '&')
17116 strcpy (name_buf
, name
+1);
17120 strcpy (name_buf
+1, name
);
17122 strcpy (tmp_buf
, "\n");
17123 strcat (tmp_buf
, label
);
17124 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
17125 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
17126 dbxout_stabd (N_SLINE
, BRANCH_ISLAND_LINE_NUMBER (branch_island
));
17127 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
17130 strcat (tmp_buf
, ":\n\tmflr r0\n\tbcl 20,31,");
17131 strcat (tmp_buf
, label
);
17132 strcat (tmp_buf
, "_pic\n");
17133 strcat (tmp_buf
, label
);
17134 strcat (tmp_buf
, "_pic:\n\tmflr r11\n");
17136 strcat (tmp_buf
, "\taddis r11,r11,ha16(");
17137 strcat (tmp_buf
, name_buf
);
17138 strcat (tmp_buf
, " - ");
17139 strcat (tmp_buf
, label
);
17140 strcat (tmp_buf
, "_pic)\n");
17142 strcat (tmp_buf
, "\tmtlr r0\n");
17144 strcat (tmp_buf
, "\taddi r12,r11,lo16(");
17145 strcat (tmp_buf
, name_buf
);
17146 strcat (tmp_buf
, " - ");
17147 strcat (tmp_buf
, label
);
17148 strcat (tmp_buf
, "_pic)\n");
17150 strcat (tmp_buf
, "\tmtctr r12\n\tbctr\n");
17154 strcat (tmp_buf
, ":\nlis r12,hi16(");
17155 strcat (tmp_buf
, name_buf
);
17156 strcat (tmp_buf
, ")\n\tori r12,r12,lo16(");
17157 strcat (tmp_buf
, name_buf
);
17158 strcat (tmp_buf
, ")\n\tmtctr r12\n\tbctr");
17160 output_asm_insn (tmp_buf
, 0);
17161 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
17162 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
17163 dbxout_stabd (N_SLINE
, BRANCH_ISLAND_LINE_NUMBER (branch_island
));
17164 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
17167 branch_island_list
= 0;
17170 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
17171 already there or not. */
17174 no_previous_def (tree function_name
)
17176 tree branch_island
;
17177 for (branch_island
= branch_island_list
;
17179 branch_island
= TREE_CHAIN (branch_island
))
17180 if (function_name
== BRANCH_ISLAND_FUNCTION_NAME (branch_island
))
17185 /* GET_PREV_LABEL gets the label name from the previous definition of
17189 get_prev_label (tree function_name
)
17191 tree branch_island
;
17192 for (branch_island
= branch_island_list
;
17194 branch_island
= TREE_CHAIN (branch_island
))
17195 if (function_name
== BRANCH_ISLAND_FUNCTION_NAME (branch_island
))
17196 return BRANCH_ISLAND_LABEL_NAME (branch_island
);
17200 /* INSN is either a function call or a millicode call. It may have an
17201 unconditional jump in its delay slot.
17203 CALL_DEST is the routine we are calling. */
17206 output_call (rtx insn
, rtx
*operands
, int dest_operand_number
,
17207 int cookie_operand_number
)
17209 static char buf
[256];
17210 if (GET_CODE (operands
[dest_operand_number
]) == SYMBOL_REF
17211 && (INTVAL (operands
[cookie_operand_number
]) & CALL_LONG
))
17214 tree funname
= get_identifier (XSTR (operands
[dest_operand_number
], 0));
17216 if (no_previous_def (funname
))
17218 int line_number
= 0;
17219 rtx label_rtx
= gen_label_rtx ();
17220 char *label_buf
, temp_buf
[256];
17221 ASM_GENERATE_INTERNAL_LABEL (temp_buf
, "L",
17222 CODE_LABEL_NUMBER (label_rtx
));
17223 label_buf
= temp_buf
[0] == '*' ? temp_buf
+ 1 : temp_buf
;
17224 labelname
= get_identifier (label_buf
);
17225 for (; insn
&& GET_CODE (insn
) != NOTE
; insn
= PREV_INSN (insn
));
17227 line_number
= NOTE_LINE_NUMBER (insn
);
17228 add_compiler_branch_island (labelname
, funname
, line_number
);
17231 labelname
= get_prev_label (funname
);
17233 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
17234 instruction will reach 'foo', otherwise link as 'bl L42'".
17235 "L42" should be a 'branch island', that will do a far jump to
17236 'foo'. Branch islands are generated in
17237 macho_branch_islands(). */
17238 sprintf (buf
, "jbsr %%z%d,%.246s",
17239 dest_operand_number
, IDENTIFIER_POINTER (labelname
));
17242 sprintf (buf
, "bl %%z%d", dest_operand_number
);
17246 /* Generate PIC and indirect symbol stubs. */
17249 machopic_output_stub (FILE *file
, const char *symb
, const char *stub
)
17251 unsigned int length
;
17252 char *symbol_name
, *lazy_ptr_name
;
17253 char *local_label_0
;
17254 static int label
= 0;
17256 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
17257 symb
= (*targetm
.strip_name_encoding
) (symb
);
17260 length
= strlen (symb
);
17261 symbol_name
= alloca (length
+ 32);
17262 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name
, symb
, length
);
17264 lazy_ptr_name
= alloca (length
+ 32);
17265 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name
, symb
, length
);
17268 machopic_picsymbol_stub1_section ();
17270 machopic_symbol_stub1_section ();
17274 fprintf (file
, "\t.align 5\n");
17276 fprintf (file
, "%s:\n", stub
);
17277 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
17280 local_label_0
= alloca (sizeof ("\"L00000000000$spb\""));
17281 sprintf (local_label_0
, "\"L%011d$spb\"", label
);
17283 fprintf (file
, "\tmflr r0\n");
17284 fprintf (file
, "\tbcl 20,31,%s\n", local_label_0
);
17285 fprintf (file
, "%s:\n\tmflr r11\n", local_label_0
);
17286 fprintf (file
, "\taddis r11,r11,ha16(%s-%s)\n",
17287 lazy_ptr_name
, local_label_0
);
17288 fprintf (file
, "\tmtlr r0\n");
17289 fprintf (file
, "\t%s r12,lo16(%s-%s)(r11)\n",
17290 (TARGET_64BIT
? "ldu" : "lwzu"),
17291 lazy_ptr_name
, local_label_0
);
17292 fprintf (file
, "\tmtctr r12\n");
17293 fprintf (file
, "\tbctr\n");
17297 fprintf (file
, "\t.align 4\n");
17299 fprintf (file
, "%s:\n", stub
);
17300 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
17302 fprintf (file
, "\tlis r11,ha16(%s)\n", lazy_ptr_name
);
17303 fprintf (file
, "\t%s r12,lo16(%s)(r11)\n",
17304 (TARGET_64BIT
? "ldu" : "lwzu"),
17306 fprintf (file
, "\tmtctr r12\n");
17307 fprintf (file
, "\tbctr\n");
17310 machopic_lazy_symbol_ptr_section ();
17311 fprintf (file
, "%s:\n", lazy_ptr_name
);
17312 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
17313 fprintf (file
, "%sdyld_stub_binding_helper\n",
17314 (TARGET_64BIT
? DOUBLE_INT_ASM_OP
: "\t.long\t"));
17317 /* Legitimize PIC addresses. If the address is already
17318 position-independent, we return ORIG. Newly generated
17319 position-independent addresses go into a reg. This is REG if non
17320 zero, otherwise we allocate register(s) as necessary. */
17322 #define SMALL_INT(X) ((unsigned) (INTVAL (X) + 0x8000) < 0x10000)
17325 rs6000_machopic_legitimize_pic_address (rtx orig
, enum machine_mode mode
,
17330 if (reg
== NULL
&& ! reload_in_progress
&& ! reload_completed
)
17331 reg
= gen_reg_rtx (Pmode
);
17333 if (GET_CODE (orig
) == CONST
)
17337 if (GET_CODE (XEXP (orig
, 0)) == PLUS
17338 && XEXP (XEXP (orig
, 0), 0) == pic_offset_table_rtx
)
17341 gcc_assert (GET_CODE (XEXP (orig
, 0)) == PLUS
);
17343 /* Use a different reg for the intermediate value, as
17344 it will be marked UNCHANGING. */
17345 reg_temp
= no_new_pseudos
? reg
: gen_reg_rtx (Pmode
);
17346 base
= rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 0),
17349 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 1),
17352 if (GET_CODE (offset
) == CONST_INT
)
17354 if (SMALL_INT (offset
))
17355 return plus_constant (base
, INTVAL (offset
));
17356 else if (! reload_in_progress
&& ! reload_completed
)
17357 offset
= force_reg (Pmode
, offset
);
17360 rtx mem
= force_const_mem (Pmode
, orig
);
17361 return machopic_legitimize_pic_address (mem
, Pmode
, reg
);
17364 return gen_rtx_PLUS (Pmode
, base
, offset
);
17367 /* Fall back on generic machopic code. */
17368 return machopic_legitimize_pic_address (orig
, mode
, reg
);
17371 /* This is just a placeholder to make linking work without having to
17372 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
17373 ever needed for Darwin (not too likely!) this would have to get a
17374 real definition. */
17381 /* Output a .machine directive for the Darwin assembler, and call
17382 the generic start_file routine. */
17385 rs6000_darwin_file_start (void)
17387 static const struct
17393 { "ppc64", "ppc64", MASK_64BIT
},
17394 { "970", "ppc970", MASK_PPC_GPOPT
| MASK_MFCRF
| MASK_POWERPC64
},
17395 { "power4", "ppc970", 0 },
17396 { "G5", "ppc970", 0 },
17397 { "7450", "ppc7450", 0 },
17398 { "7400", "ppc7400", MASK_ALTIVEC
},
17399 { "G4", "ppc7400", 0 },
17400 { "750", "ppc750", 0 },
17401 { "740", "ppc750", 0 },
17402 { "G3", "ppc750", 0 },
17403 { "604e", "ppc604e", 0 },
17404 { "604", "ppc604", 0 },
17405 { "603e", "ppc603", 0 },
17406 { "603", "ppc603", 0 },
17407 { "601", "ppc601", 0 },
17408 { NULL
, "ppc", 0 } };
17409 const char *cpu_id
= "";
17412 rs6000_file_start ();
17414 /* Determine the argument to -mcpu=. Default to G3 if not specified. */
17415 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
17416 if (rs6000_select
[i
].set_arch_p
&& rs6000_select
[i
].string
17417 && rs6000_select
[i
].string
[0] != '\0')
17418 cpu_id
= rs6000_select
[i
].string
;
17420 /* Look through the mapping array. Pick the first name that either
17421 matches the argument, has a bit set in IF_SET that is also set
17422 in the target flags, or has a NULL name. */
17425 while (mapping
[i
].arg
!= NULL
17426 && strcmp (mapping
[i
].arg
, cpu_id
) != 0
17427 && (mapping
[i
].if_set
& target_flags
) == 0)
17430 fprintf (asm_out_file
, "\t.machine %s\n", mapping
[i
].name
);
17433 #endif /* TARGET_MACHO */
17436 static unsigned int
17437 rs6000_elf_section_type_flags (tree decl
, const char *name
, int reloc
)
17439 return default_section_type_flags_1 (decl
, name
, reloc
,
17440 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
17443 /* Record an element in the table of global constructors. SYMBOL is
17444 a SYMBOL_REF of the function to be called; PRIORITY is a number
17445 between 0 and MAX_INIT_PRIORITY.
17447 This differs from default_named_section_asm_out_constructor in
17448 that we have special handling for -mrelocatable. */
17451 rs6000_elf_asm_out_constructor (rtx symbol
, int priority
)
17453 const char *section
= ".ctors";
17456 if (priority
!= DEFAULT_INIT_PRIORITY
)
17458 sprintf (buf
, ".ctors.%.5u",
17459 /* Invert the numbering so the linker puts us in the proper
17460 order; constructors are run from right to left, and the
17461 linker sorts in increasing order. */
17462 MAX_INIT_PRIORITY
- priority
);
17466 named_section_flags (section
, SECTION_WRITE
);
17467 assemble_align (POINTER_SIZE
);
17469 if (TARGET_RELOCATABLE
)
17471 fputs ("\t.long (", asm_out_file
);
17472 output_addr_const (asm_out_file
, symbol
);
17473 fputs (")@fixup\n", asm_out_file
);
17476 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
17480 rs6000_elf_asm_out_destructor (rtx symbol
, int priority
)
17482 const char *section
= ".dtors";
17485 if (priority
!= DEFAULT_INIT_PRIORITY
)
17487 sprintf (buf
, ".dtors.%.5u",
17488 /* Invert the numbering so the linker puts us in the proper
17489 order; constructors are run from right to left, and the
17490 linker sorts in increasing order. */
17491 MAX_INIT_PRIORITY
- priority
);
17495 named_section_flags (section
, SECTION_WRITE
);
17496 assemble_align (POINTER_SIZE
);
17498 if (TARGET_RELOCATABLE
)
17500 fputs ("\t.long (", asm_out_file
);
17501 output_addr_const (asm_out_file
, symbol
);
17502 fputs (")@fixup\n", asm_out_file
);
17505 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
17509 rs6000_elf_declare_function_name (FILE *file
, const char *name
, tree decl
)
17513 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file
);
17514 ASM_OUTPUT_LABEL (file
, name
);
17515 fputs (DOUBLE_INT_ASM_OP
, file
);
17516 rs6000_output_function_entry (file
, name
);
17517 fputs (",.TOC.@tocbase,0\n\t.previous\n", file
);
17520 fputs ("\t.size\t", file
);
17521 assemble_name (file
, name
);
17522 fputs (",24\n\t.type\t.", file
);
17523 assemble_name (file
, name
);
17524 fputs (",@function\n", file
);
17525 if (TREE_PUBLIC (decl
) && ! DECL_WEAK (decl
))
17527 fputs ("\t.globl\t.", file
);
17528 assemble_name (file
, name
);
17533 ASM_OUTPUT_TYPE_DIRECTIVE (file
, name
, "function");
17534 ASM_DECLARE_RESULT (file
, DECL_RESULT (decl
));
17535 rs6000_output_function_entry (file
, name
);
17536 fputs (":\n", file
);
17540 if (TARGET_RELOCATABLE
17541 && !TARGET_SECURE_PLT
17542 && (get_pool_size () != 0 || current_function_profile
)
17547 (*targetm
.asm_out
.internal_label
) (file
, "LCL", rs6000_pic_labelno
);
17549 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCTOC", 1);
17550 fprintf (file
, "\t.long ");
17551 assemble_name (file
, buf
);
17553 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
17554 assemble_name (file
, buf
);
17558 ASM_OUTPUT_TYPE_DIRECTIVE (file
, name
, "function");
17559 ASM_DECLARE_RESULT (file
, DECL_RESULT (decl
));
17561 if (DEFAULT_ABI
== ABI_AIX
)
17563 const char *desc_name
, *orig_name
;
17565 orig_name
= (*targetm
.strip_name_encoding
) (name
);
17566 desc_name
= orig_name
;
17567 while (*desc_name
== '.')
17570 if (TREE_PUBLIC (decl
))
17571 fprintf (file
, "\t.globl %s\n", desc_name
);
17573 fprintf (file
, "%s\n", MINIMAL_TOC_SECTION_ASM_OP
);
17574 fprintf (file
, "%s:\n", desc_name
);
17575 fprintf (file
, "\t.long %s\n", orig_name
);
17576 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file
);
17577 if (DEFAULT_ABI
== ABI_AIX
)
17578 fputs ("\t.long 0\n", file
);
17579 fprintf (file
, "\t.previous\n");
17581 ASM_OUTPUT_LABEL (file
, name
);
17585 rs6000_elf_end_indicate_exec_stack (void)
17588 file_end_indicate_exec_stack ();
17594 rs6000_xcoff_asm_globalize_label (FILE *stream
, const char *name
)
17596 fputs (GLOBAL_ASM_OP
, stream
);
17597 RS6000_OUTPUT_BASENAME (stream
, name
);
17598 putc ('\n', stream
);
17602 rs6000_xcoff_asm_named_section (const char *name
, unsigned int flags
,
17603 tree decl ATTRIBUTE_UNUSED
)
17606 static const char * const suffix
[3] = { "PR", "RO", "RW" };
17608 if (flags
& SECTION_CODE
)
17610 else if (flags
& SECTION_WRITE
)
17615 fprintf (asm_out_file
, "\t.csect %s%s[%s],%u\n",
17616 (flags
& SECTION_CODE
) ? "." : "",
17617 name
, suffix
[smclass
], flags
& SECTION_ENTSIZE
);
17621 rs6000_xcoff_select_section (tree decl
, int reloc
,
17622 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
17624 if (decl_readonly_section_1 (decl
, reloc
, 1))
17626 if (TREE_PUBLIC (decl
))
17627 read_only_data_section ();
17629 read_only_private_data_section ();
17633 if (TREE_PUBLIC (decl
))
17636 private_data_section ();
17641 rs6000_xcoff_unique_section (tree decl
, int reloc ATTRIBUTE_UNUSED
)
17645 /* Use select_section for private and uninitialized data. */
17646 if (!TREE_PUBLIC (decl
)
17647 || DECL_COMMON (decl
)
17648 || DECL_INITIAL (decl
) == NULL_TREE
17649 || DECL_INITIAL (decl
) == error_mark_node
17650 || (flag_zero_initialized_in_bss
17651 && initializer_zerop (DECL_INITIAL (decl
))))
17654 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
17655 name
= (*targetm
.strip_name_encoding
) (name
);
17656 DECL_SECTION_NAME (decl
) = build_string (strlen (name
), name
);
17659 /* Select section for constant in constant pool.
17661 On RS/6000, all constants are in the private read-only data area.
17662 However, if this is being placed in the TOC it must be output as a
17666 rs6000_xcoff_select_rtx_section (enum machine_mode mode
, rtx x
,
17667 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
17669 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
17672 read_only_private_data_section ();
/* Remove any trailing [DS] or the like from the symbol name.  */

static const char *
rs6000_xcoff_strip_name_encoding (const char *name)
{
  size_t len;
  if (*name == '*')
    name++;
  len = strlen (name);
  /* "[XX]" suffixes are always four characters long.  */
  if (name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
17690 /* Section attributes. AIX is always PIC. */
17692 static unsigned int
17693 rs6000_xcoff_section_type_flags (tree decl
, const char *name
, int reloc
)
17695 unsigned int align
;
17696 unsigned int flags
= default_section_type_flags_1 (decl
, name
, reloc
, 1);
17698 /* Align to at least UNIT size. */
17699 if (flags
& SECTION_CODE
)
17700 align
= MIN_UNITS_PER_WORD
;
17702 /* Increase alignment of large objects if not already stricter. */
17703 align
= MAX ((DECL_ALIGN (decl
) / BITS_PER_UNIT
),
17704 int_size_in_bytes (TREE_TYPE (decl
)) > MIN_UNITS_PER_WORD
17705 ? UNITS_PER_FP_WORD
: MIN_UNITS_PER_WORD
);
17707 return flags
| (exact_log2 (align
) & SECTION_ENTSIZE
);
17710 /* Output at beginning of assembler file.
17712 Initialize the section names for the RS/6000 at this point.
17714 Specify filename, including full path, to assembler.
17716 We want to go into the TOC section so at least one .toc will be emitted.
17717 Also, in order to output proper .bs/.es pairs, we need at least one static
17718 [RW] section emitted.
17720 Finally, declare mcount when profiling to make the assembler happy. */
17723 rs6000_xcoff_file_start (void)
17725 rs6000_gen_section_name (&xcoff_bss_section_name
,
17726 main_input_filename
, ".bss_");
17727 rs6000_gen_section_name (&xcoff_private_data_section_name
,
17728 main_input_filename
, ".rw_");
17729 rs6000_gen_section_name (&xcoff_read_only_section_name
,
17730 main_input_filename
, ".ro_");
17732 fputs ("\t.file\t", asm_out_file
);
17733 output_quoted_string (asm_out_file
, main_input_filename
);
17734 fputc ('\n', asm_out_file
);
17735 if (write_symbols
!= NO_DEBUG
)
17736 private_data_section ();
17739 fprintf (asm_out_file
, "\t.extern %s\n", RS6000_MCOUNT
);
17740 rs6000_file_start ();
17743 /* Output at end of assembler file.
17744 On the RS/6000, referencing data should automatically pull in text. */
17747 rs6000_xcoff_file_end (void)
17750 fputs ("_section_.text:\n", asm_out_file
);
17752 fputs (TARGET_32BIT
17753 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
17756 #endif /* TARGET_XCOFF */
17758 /* Compute a (partial) cost for rtx X. Return true if the complete
17759 cost has been computed, and false if subexpressions should be
17760 scanned. In either case, *TOTAL contains the cost result. */
17763 rs6000_rtx_costs (rtx x
, int code
, int outer_code
, int *total
)
17765 enum machine_mode mode
= GET_MODE (x
);
17769 /* On the RS/6000, if it is valid in the insn, it is free. */
17771 if (((outer_code
== SET
17772 || outer_code
== PLUS
17773 || outer_code
== MINUS
)
17774 && (CONST_OK_FOR_LETTER_P (INTVAL (x
), 'I')
17775 || CONST_OK_FOR_LETTER_P (INTVAL (x
), 'L')))
17776 || (outer_code
== AND
17777 && (CONST_OK_FOR_LETTER_P (INTVAL (x
), 'K')
17778 || (CONST_OK_FOR_LETTER_P (INTVAL (x
),
17779 mode
== SImode
? 'L' : 'J'))
17780 || mask_operand (x
, VOIDmode
)))
17781 || ((outer_code
== IOR
|| outer_code
== XOR
)
17782 && (CONST_OK_FOR_LETTER_P (INTVAL (x
), 'K')
17783 || (CONST_OK_FOR_LETTER_P (INTVAL (x
),
17784 mode
== SImode
? 'L' : 'J'))))
17785 || outer_code
== ASHIFT
17786 || outer_code
== ASHIFTRT
17787 || outer_code
== LSHIFTRT
17788 || outer_code
== ROTATE
17789 || outer_code
== ROTATERT
17790 || outer_code
== ZERO_EXTRACT
17791 || (outer_code
== MULT
17792 && CONST_OK_FOR_LETTER_P (INTVAL (x
), 'I'))
17793 || ((outer_code
== DIV
|| outer_code
== UDIV
17794 || outer_code
== MOD
|| outer_code
== UMOD
)
17795 && exact_log2 (INTVAL (x
)) >= 0)
17796 || (outer_code
== COMPARE
17797 && (CONST_OK_FOR_LETTER_P (INTVAL (x
), 'I')
17798 || CONST_OK_FOR_LETTER_P (INTVAL (x
), 'K')))
17799 || (outer_code
== EQ
17800 && (CONST_OK_FOR_LETTER_P (INTVAL (x
), 'I')
17801 || CONST_OK_FOR_LETTER_P (INTVAL (x
), 'K')
17802 || (CONST_OK_FOR_LETTER_P (INTVAL (x
),
17803 mode
== SImode
? 'L' : 'J'))))
17804 || (outer_code
== GTU
17805 && CONST_OK_FOR_LETTER_P (INTVAL (x
), 'I'))
17806 || (outer_code
== LTU
17807 && CONST_OK_FOR_LETTER_P (INTVAL (x
), 'P')))
17812 else if ((outer_code
== PLUS
17813 && reg_or_add_cint_operand (x
, VOIDmode
))
17814 || (outer_code
== MINUS
17815 && reg_or_sub_cint_operand (x
, VOIDmode
))
17816 || ((outer_code
== SET
17817 || outer_code
== IOR
17818 || outer_code
== XOR
)
17820 & ~ (unsigned HOST_WIDE_INT
) 0xffffffff) == 0))
17822 *total
= COSTS_N_INSNS (1);
17829 && ((outer_code
== AND
17830 && (CONST_OK_FOR_LETTER_P (INTVAL (x
), 'K')
17831 || CONST_OK_FOR_LETTER_P (INTVAL (x
), 'L')
17832 || mask_operand (x
, DImode
)))
17833 || ((outer_code
== IOR
|| outer_code
== XOR
)
17834 && CONST_DOUBLE_HIGH (x
) == 0
17835 && (CONST_DOUBLE_LOW (x
)
17836 & ~ (unsigned HOST_WIDE_INT
) 0xffff) == 0)))
17841 else if (mode
== DImode
17842 && (outer_code
== SET
17843 || outer_code
== IOR
17844 || outer_code
== XOR
)
17845 && CONST_DOUBLE_HIGH (x
) == 0)
17847 *total
= COSTS_N_INSNS (1);
17856 /* When optimizing for size, MEM should be slightly more expensive
17857 than generating address, e.g., (plus (reg) (const)).
17858 L1 cache latency is about two instructions. */
17859 *total
= optimize_size
? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
17867 if (mode
== DFmode
)
17869 if (GET_CODE (XEXP (x
, 0)) == MULT
)
17871 /* FNMA accounted in outer NEG. */
17872 if (outer_code
== NEG
)
17873 *total
= rs6000_cost
->dmul
- rs6000_cost
->fp
;
17875 *total
= rs6000_cost
->dmul
;
17878 *total
= rs6000_cost
->fp
;
17880 else if (mode
== SFmode
)
17882 /* FNMA accounted in outer NEG. */
17883 if (outer_code
== NEG
&& GET_CODE (XEXP (x
, 0)) == MULT
)
17886 *total
= rs6000_cost
->fp
;
17888 else if (GET_CODE (XEXP (x
, 0)) == MULT
)
17890 /* The rs6000 doesn't have shift-and-add instructions. */
17891 rs6000_rtx_costs (XEXP (x
, 0), MULT
, PLUS
, total
);
17892 *total
+= COSTS_N_INSNS (1);
17895 *total
= COSTS_N_INSNS (1);
17899 if (mode
== DFmode
)
17901 if (GET_CODE (XEXP (x
, 0)) == MULT
)
17903 /* FNMA accounted in outer NEG. */
17904 if (outer_code
== NEG
)
17907 *total
= rs6000_cost
->dmul
;
17910 *total
= rs6000_cost
->fp
;
17912 else if (mode
== SFmode
)
17914 /* FNMA accounted in outer NEG. */
17915 if (outer_code
== NEG
&& GET_CODE (XEXP (x
, 0)) == MULT
)
17918 *total
= rs6000_cost
->fp
;
17920 else if (GET_CODE (XEXP (x
, 0)) == MULT
)
17922 /* The rs6000 doesn't have shift-and-sub instructions. */
17923 rs6000_rtx_costs (XEXP (x
, 0), MULT
, MINUS
, total
);
17924 *total
+= COSTS_N_INSNS (1);
17927 *total
= COSTS_N_INSNS (1);
17931 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
17932 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (x
, 1)), 'I'))
17934 if (INTVAL (XEXP (x
, 1)) >= -256
17935 && INTVAL (XEXP (x
, 1)) <= 255)
17936 *total
= rs6000_cost
->mulsi_const9
;
17938 *total
= rs6000_cost
->mulsi_const
;
17940 /* FMA accounted in outer PLUS/MINUS. */
17941 else if ((mode
== DFmode
|| mode
== SFmode
)
17942 && (outer_code
== PLUS
|| outer_code
== MINUS
))
17944 else if (mode
== DFmode
)
17945 *total
= rs6000_cost
->dmul
;
17946 else if (mode
== SFmode
)
17947 *total
= rs6000_cost
->fp
;
17948 else if (mode
== DImode
)
17949 *total
= rs6000_cost
->muldi
;
17951 *total
= rs6000_cost
->mulsi
;
17956 if (FLOAT_MODE_P (mode
))
17958 *total
= mode
== DFmode
? rs6000_cost
->ddiv
17959 : rs6000_cost
->sdiv
;
17966 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
17967 && exact_log2 (INTVAL (XEXP (x
, 1))) >= 0)
17969 if (code
== DIV
|| code
== MOD
)
17971 *total
= COSTS_N_INSNS (2);
17974 *total
= COSTS_N_INSNS (1);
17978 if (GET_MODE (XEXP (x
, 1)) == DImode
)
17979 *total
= rs6000_cost
->divdi
;
17981 *total
= rs6000_cost
->divsi
;
17983 /* Add in shift and subtract for MOD. */
17984 if (code
== MOD
|| code
== UMOD
)
17985 *total
+= COSTS_N_INSNS (2);
17989 *total
= COSTS_N_INSNS (4);
17993 if (outer_code
== AND
|| outer_code
== IOR
|| outer_code
== XOR
)
18004 *total
= COSTS_N_INSNS (1);
18012 /* Handle mul_highpart. */
18013 if (outer_code
== TRUNCATE
18014 && GET_CODE (XEXP (x
, 0)) == MULT
)
18016 if (mode
== DImode
)
18017 *total
= rs6000_cost
->muldi
;
18019 *total
= rs6000_cost
->mulsi
;
18022 else if (outer_code
== AND
)
18025 *total
= COSTS_N_INSNS (1);
18030 if (GET_CODE (XEXP (x
, 0)) == MEM
)
18033 *total
= COSTS_N_INSNS (1);
18039 if (!FLOAT_MODE_P (mode
))
18041 *total
= COSTS_N_INSNS (1);
18047 case UNSIGNED_FLOAT
:
18051 case FLOAT_TRUNCATE
:
18052 *total
= rs6000_cost
->fp
;
18056 switch (XINT (x
, 1))
18059 *total
= rs6000_cost
->fp
;
18071 *total
= COSTS_N_INSNS (1);
18074 else if (FLOAT_MODE_P (mode
)
18075 && TARGET_PPC_GFXOPT
&& TARGET_HARD_FLOAT
&& TARGET_FPRS
)
18077 *total
= rs6000_cost
->fp
;
18085 /* Carry bit requires mode == Pmode.
18086 NEG or PLUS already counted so only add one. */
18088 && (outer_code
== NEG
|| outer_code
== PLUS
))
18090 *total
= COSTS_N_INSNS (1);
18093 if (outer_code
== SET
)
18095 if (XEXP (x
, 1) == const0_rtx
)
18097 *total
= COSTS_N_INSNS (2);
18100 else if (mode
== Pmode
)
18102 *total
= COSTS_N_INSNS (3);
18111 if (outer_code
== SET
&& (XEXP (x
, 1) == const0_rtx
))
18113 *total
= COSTS_N_INSNS (2);
18117 if (outer_code
== COMPARE
)
18131 /* A C expression returning the cost of moving data from a register of class
18132 CLASS1 to one of CLASS2. */
18135 rs6000_register_move_cost (enum machine_mode mode
,
18136 enum reg_class from
, enum reg_class to
)
18138 /* Moves from/to GENERAL_REGS. */
18139 if (reg_classes_intersect_p (to
, GENERAL_REGS
)
18140 || reg_classes_intersect_p (from
, GENERAL_REGS
))
18142 if (! reg_classes_intersect_p (to
, GENERAL_REGS
))
18145 if (from
== FLOAT_REGS
|| from
== ALTIVEC_REGS
)
18146 return (rs6000_memory_move_cost (mode
, from
, 0)
18147 + rs6000_memory_move_cost (mode
, GENERAL_REGS
, 0));
18149 /* It's more expensive to move CR_REGS than CR0_REGS because of the
18151 else if (from
== CR_REGS
)
18155 /* A move will cost one instruction per GPR moved. */
18156 return 2 * hard_regno_nregs
[0][mode
];
18159 /* Moving between two similar registers is just one instruction. */
18160 else if (reg_classes_intersect_p (to
, from
))
18161 return mode
== TFmode
? 4 : 2;
18163 /* Everything else has to go through GENERAL_REGS. */
18165 return (rs6000_register_move_cost (mode
, GENERAL_REGS
, to
)
18166 + rs6000_register_move_cost (mode
, from
, GENERAL_REGS
));
18169 /* A C expressions returning the cost of moving data of MODE from a register to
18173 rs6000_memory_move_cost (enum machine_mode mode
, enum reg_class
class,
18174 int in ATTRIBUTE_UNUSED
)
18176 if (reg_classes_intersect_p (class, GENERAL_REGS
))
18177 return 4 * hard_regno_nregs
[0][mode
];
18178 else if (reg_classes_intersect_p (class, FLOAT_REGS
))
18179 return 4 * hard_regno_nregs
[32][mode
];
18180 else if (reg_classes_intersect_p (class, ALTIVEC_REGS
))
18181 return 4 * hard_regno_nregs
[FIRST_ALTIVEC_REGNO
][mode
];
18183 return 4 + rs6000_register_move_cost (mode
, class, GENERAL_REGS
);
18186 /* Newton-Raphson approximation of single-precision floating point divide n/d.
18187 Assumes no trapping math and finite arguments. */
18190 rs6000_emit_swdivsf (rtx res
, rtx n
, rtx d
)
18192 rtx x0
, e0
, e1
, y1
, u0
, v0
, one
;
18194 x0
= gen_reg_rtx (SFmode
);
18195 e0
= gen_reg_rtx (SFmode
);
18196 e1
= gen_reg_rtx (SFmode
);
18197 y1
= gen_reg_rtx (SFmode
);
18198 u0
= gen_reg_rtx (SFmode
);
18199 v0
= gen_reg_rtx (SFmode
);
18200 one
= force_reg (SFmode
, CONST_DOUBLE_FROM_REAL_VALUE (dconst1
, SFmode
));
18202 /* x0 = 1./d estimate */
18203 emit_insn (gen_rtx_SET (VOIDmode
, x0
,
18204 gen_rtx_UNSPEC (SFmode
, gen_rtvec (1, d
),
18206 /* e0 = 1. - d * x0 */
18207 emit_insn (gen_rtx_SET (VOIDmode
, e0
,
18208 gen_rtx_MINUS (SFmode
, one
,
18209 gen_rtx_MULT (SFmode
, d
, x0
))));
18210 /* e1 = e0 + e0 * e0 */
18211 emit_insn (gen_rtx_SET (VOIDmode
, e1
,
18212 gen_rtx_PLUS (SFmode
,
18213 gen_rtx_MULT (SFmode
, e0
, e0
), e0
)));
18214 /* y1 = x0 + e1 * x0 */
18215 emit_insn (gen_rtx_SET (VOIDmode
, y1
,
18216 gen_rtx_PLUS (SFmode
,
18217 gen_rtx_MULT (SFmode
, e1
, x0
), x0
)));
18219 emit_insn (gen_rtx_SET (VOIDmode
, u0
,
18220 gen_rtx_MULT (SFmode
, n
, y1
)));
18221 /* v0 = n - d * u0 */
18222 emit_insn (gen_rtx_SET (VOIDmode
, v0
,
18223 gen_rtx_MINUS (SFmode
, n
,
18224 gen_rtx_MULT (SFmode
, d
, u0
))));
18225 /* res = u0 + v0 * y1 */
18226 emit_insn (gen_rtx_SET (VOIDmode
, res
,
18227 gen_rtx_PLUS (SFmode
,
18228 gen_rtx_MULT (SFmode
, v0
, y1
), u0
)));
18231 /* Newton-Raphson approximation of double-precision floating point divide n/d.
18232 Assumes no trapping math and finite arguments. */
18235 rs6000_emit_swdivdf (rtx res
, rtx n
, rtx d
)
18237 rtx x0
, e0
, e1
, e2
, y1
, y2
, y3
, u0
, v0
, one
;
18239 x0
= gen_reg_rtx (DFmode
);
18240 e0
= gen_reg_rtx (DFmode
);
18241 e1
= gen_reg_rtx (DFmode
);
18242 e2
= gen_reg_rtx (DFmode
);
18243 y1
= gen_reg_rtx (DFmode
);
18244 y2
= gen_reg_rtx (DFmode
);
18245 y3
= gen_reg_rtx (DFmode
);
18246 u0
= gen_reg_rtx (DFmode
);
18247 v0
= gen_reg_rtx (DFmode
);
18248 one
= force_reg (DFmode
, CONST_DOUBLE_FROM_REAL_VALUE (dconst1
, DFmode
));
18250 /* x0 = 1./d estimate */
18251 emit_insn (gen_rtx_SET (VOIDmode
, x0
,
18252 gen_rtx_UNSPEC (DFmode
, gen_rtvec (1, d
),
18254 /* e0 = 1. - d * x0 */
18255 emit_insn (gen_rtx_SET (VOIDmode
, e0
,
18256 gen_rtx_MINUS (DFmode
, one
,
18257 gen_rtx_MULT (SFmode
, d
, x0
))));
18258 /* y1 = x0 + e0 * x0 */
18259 emit_insn (gen_rtx_SET (VOIDmode
, y1
,
18260 gen_rtx_PLUS (DFmode
,
18261 gen_rtx_MULT (DFmode
, e0
, x0
), x0
)));
18263 emit_insn (gen_rtx_SET (VOIDmode
, e1
,
18264 gen_rtx_MULT (DFmode
, e0
, e0
)));
18265 /* y2 = y1 + e1 * y1 */
18266 emit_insn (gen_rtx_SET (VOIDmode
, y2
,
18267 gen_rtx_PLUS (DFmode
,
18268 gen_rtx_MULT (DFmode
, e1
, y1
), y1
)));
18270 emit_insn (gen_rtx_SET (VOIDmode
, e2
,
18271 gen_rtx_MULT (DFmode
, e1
, e1
)));
18272 /* y3 = y2 + e2 * y2 */
18273 emit_insn (gen_rtx_SET (VOIDmode
, y3
,
18274 gen_rtx_PLUS (DFmode
,
18275 gen_rtx_MULT (DFmode
, e2
, y2
), y2
)));
18277 emit_insn (gen_rtx_SET (VOIDmode
, u0
,
18278 gen_rtx_MULT (DFmode
, n
, y3
)));
18279 /* v0 = n - d * u0 */
18280 emit_insn (gen_rtx_SET (VOIDmode
, v0
,
18281 gen_rtx_MINUS (DFmode
, n
,
18282 gen_rtx_MULT (DFmode
, d
, u0
))));
18283 /* res = u0 + v0 * y3 */
18284 emit_insn (gen_rtx_SET (VOIDmode
, res
,
18285 gen_rtx_PLUS (DFmode
,
18286 gen_rtx_MULT (DFmode
, v0
, y3
), u0
)));
18289 /* Return an RTX representing where to find the function value of a
18290 function returning MODE. */
18292 rs6000_complex_function_value (enum machine_mode mode
)
18294 unsigned int regno
;
18296 enum machine_mode inner
= GET_MODE_INNER (mode
);
18297 unsigned int inner_bytes
= GET_MODE_SIZE (inner
);
18299 if (FLOAT_MODE_P (mode
) && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
18300 regno
= FP_ARG_RETURN
;
18303 regno
= GP_ARG_RETURN
;
18305 /* 32-bit is OK since it'll go in r3/r4. */
18306 if (TARGET_32BIT
&& inner_bytes
>= 4)
18307 return gen_rtx_REG (mode
, regno
);
18310 if (inner_bytes
>= 8)
18311 return gen_rtx_REG (mode
, regno
);
18313 r1
= gen_rtx_EXPR_LIST (inner
, gen_rtx_REG (inner
, regno
),
18315 r2
= gen_rtx_EXPR_LIST (inner
, gen_rtx_REG (inner
, regno
+ 1),
18316 GEN_INT (inner_bytes
));
18317 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r2
));
18320 /* Define how to find the value returned by a function.
18321 VALTYPE is the data type of the value (as a tree).
18322 If the precise function being called is known, FUNC is its FUNCTION_DECL;
18323 otherwise, FUNC is 0.
18325 On the SPE, both FPs and vectors are returned in r3.
18327 On RS/6000 an integer value is in r3 and a floating-point value is in
18328 fp1, unless -msoft-float. */
18331 rs6000_function_value (tree valtype
, tree func ATTRIBUTE_UNUSED
)
18333 enum machine_mode mode
;
18334 unsigned int regno
;
18336 /* Special handling for structs in darwin64. */
18337 if (rs6000_darwin64_abi
18338 && TYPE_MODE (valtype
) == BLKmode
18339 && TREE_CODE (valtype
) == RECORD_TYPE
18340 && int_size_in_bytes (valtype
) > 0)
18342 CUMULATIVE_ARGS valcum
;
18346 valcum
.fregno
= FP_ARG_MIN_REG
;
18347 valcum
.vregno
= ALTIVEC_ARG_MIN_REG
;
18348 /* Do a trial code generation as if this were going to be passed as
18349 an argument; if any part goes in memory, we return NULL. */
18350 valret
= rs6000_darwin64_record_arg (&valcum
, valtype
, 1, true);
18353 /* Otherwise fall through to standard ABI rules. */
18356 if (TARGET_32BIT
&& TARGET_POWERPC64
&& TYPE_MODE (valtype
) == DImode
)
18358 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
18359 return gen_rtx_PARALLEL (DImode
,
18361 gen_rtx_EXPR_LIST (VOIDmode
,
18362 gen_rtx_REG (SImode
, GP_ARG_RETURN
),
18364 gen_rtx_EXPR_LIST (VOIDmode
,
18365 gen_rtx_REG (SImode
,
18366 GP_ARG_RETURN
+ 1),
18370 if ((INTEGRAL_TYPE_P (valtype
)
18371 && TYPE_PRECISION (valtype
) < BITS_PER_WORD
)
18372 || POINTER_TYPE_P (valtype
))
18373 mode
= TARGET_32BIT
? SImode
: DImode
;
18375 mode
= TYPE_MODE (valtype
);
18377 if (SCALAR_FLOAT_TYPE_P (valtype
) && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
18378 regno
= FP_ARG_RETURN
;
18379 else if (TREE_CODE (valtype
) == COMPLEX_TYPE
18380 && targetm
.calls
.split_complex_arg
)
18381 return rs6000_complex_function_value (mode
);
18382 else if (TREE_CODE (valtype
) == VECTOR_TYPE
18383 && TARGET_ALTIVEC
&& TARGET_ALTIVEC_ABI
18384 && ALTIVEC_VECTOR_MODE (mode
))
18385 regno
= ALTIVEC_ARG_RETURN
;
18386 else if (TARGET_E500_DOUBLE
&& TARGET_HARD_FLOAT
18387 && (mode
== DFmode
|| mode
== DCmode
))
18388 return spe_build_register_parallel (mode
, GP_ARG_RETURN
);
18390 regno
= GP_ARG_RETURN
;
18392 return gen_rtx_REG (mode
, regno
);
18395 /* Define how to find the value returned by a library function
18396 assuming the value has mode MODE. */
18398 rs6000_libcall_value (enum machine_mode mode
)
18400 unsigned int regno
;
18402 if (TARGET_32BIT
&& TARGET_POWERPC64
&& mode
== DImode
)
18404 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
18405 return gen_rtx_PARALLEL (DImode
,
18407 gen_rtx_EXPR_LIST (VOIDmode
,
18408 gen_rtx_REG (SImode
, GP_ARG_RETURN
),
18410 gen_rtx_EXPR_LIST (VOIDmode
,
18411 gen_rtx_REG (SImode
,
18412 GP_ARG_RETURN
+ 1),
18416 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
18417 && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
18418 regno
= FP_ARG_RETURN
;
18419 else if (ALTIVEC_VECTOR_MODE (mode
)
18420 && TARGET_ALTIVEC
&& TARGET_ALTIVEC_ABI
)
18421 regno
= ALTIVEC_ARG_RETURN
;
18422 else if (COMPLEX_MODE_P (mode
) && targetm
.calls
.split_complex_arg
)
18423 return rs6000_complex_function_value (mode
);
18424 else if (TARGET_E500_DOUBLE
&& TARGET_HARD_FLOAT
18425 && (mode
== DFmode
|| mode
== DCmode
))
18426 return spe_build_register_parallel (mode
, GP_ARG_RETURN
);
18428 regno
= GP_ARG_RETURN
;
18430 return gen_rtx_REG (mode
, regno
);
18433 /* Define the offset between two registers, FROM to be eliminated and its
18434 replacement TO, at the start of a routine. */
18436 rs6000_initial_elimination_offset (int from
, int to
)
18438 rs6000_stack_t
*info
= rs6000_stack_info ();
18439 HOST_WIDE_INT offset
;
18441 if (from
== HARD_FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
18442 offset
= info
->push_p
? 0 : -info
->total_size
;
18443 else if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
18445 offset
= info
->push_p
? 0 : -info
->total_size
;
18446 if (FRAME_GROWS_DOWNWARD
)
18447 offset
+= info
->fixed_size
+ info
->vars_size
+ info
->parm_size
;
18449 else if (from
== FRAME_POINTER_REGNUM
&& to
== HARD_FRAME_POINTER_REGNUM
)
18450 offset
= FRAME_GROWS_DOWNWARD
18451 ? info
->fixed_size
+ info
->vars_size
+ info
->parm_size
18453 else if (from
== ARG_POINTER_REGNUM
&& to
== HARD_FRAME_POINTER_REGNUM
)
18454 offset
= info
->total_size
;
18455 else if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
18456 offset
= info
->push_p
? info
->total_size
: 0;
18457 else if (from
== RS6000_PIC_OFFSET_TABLE_REGNUM
)
18460 gcc_unreachable ();
18465 /* Return true if TYPE is a SPE or AltiVec opaque type. */
18468 rs6000_is_opaque_type (tree type
)
18470 return (type
== opaque_V2SI_type_node
18471 || type
== opaque_V2SF_type_node
18472 || type
== opaque_p_V2SI_type_node
18473 || type
== opaque_V4SI_type_node
);
18477 rs6000_dwarf_register_span (rtx reg
)
18482 && (SPE_VECTOR_MODE (GET_MODE (reg
))
18483 || (TARGET_E500_DOUBLE
&& GET_MODE (reg
) == DFmode
)))
18488 regno
= REGNO (reg
);
18490 /* The duality of the SPE register size wreaks all kinds of havoc.
18491 This is a way of distinguishing r0 in 32-bits from r0 in
18494 gen_rtx_PARALLEL (VOIDmode
,
18497 gen_rtx_REG (SImode
, regno
+ 1200),
18498 gen_rtx_REG (SImode
, regno
))
18500 gen_rtx_REG (SImode
, regno
),
18501 gen_rtx_REG (SImode
, regno
+ 1200)));
18504 /* Map internal gcc register numbers to DWARF2 register numbers. */
18507 rs6000_dbx_register_number (unsigned int regno
)
18509 if (regno
<= 63 || write_symbols
!= DWARF2_DEBUG
)
18511 if (regno
== MQ_REGNO
)
18513 if (regno
== LINK_REGISTER_REGNUM
)
18515 if (regno
== COUNT_REGISTER_REGNUM
)
18517 if (CR_REGNO_P (regno
))
18518 return regno
- CR0_REGNO
+ 86;
18519 if (regno
== XER_REGNO
)
18521 if (ALTIVEC_REGNO_P (regno
))
18522 return regno
- FIRST_ALTIVEC_REGNO
+ 1124;
18523 if (regno
== VRSAVE_REGNO
)
18525 if (regno
== VSCR_REGNO
)
18527 if (regno
== SPE_ACC_REGNO
)
18529 if (regno
== SPEFSCR_REGNO
)
18531 /* SPE high reg number. We get these values of regno from
18532 rs6000_dwarf_register_span. */
18533 gcc_assert (regno
>= 1200 && regno
< 1232);
18537 /* target hook eh_return_filter_mode */
18538 static enum machine_mode
18539 rs6000_eh_return_filter_mode (void)
18541 return TARGET_32BIT
? SImode
: word_mode
;
18544 /* Target hook for vector_mode_supported_p. */
18546 rs6000_vector_mode_supported_p (enum machine_mode mode
)
18549 if (TARGET_SPE
&& SPE_VECTOR_MODE (mode
))
18552 else if (TARGET_ALTIVEC
&& ALTIVEC_VECTOR_MODE (mode
))
18559 /* Target hook for invalid_arg_for_unprototyped_fn. */
18560 static const char *
18561 invalid_arg_for_unprototyped_fn (tree typelist
, tree funcdecl
, tree val
)
18563 return (!rs6000_darwin64_abi
18565 && TREE_CODE (TREE_TYPE (val
)) == VECTOR_TYPE
18566 && (funcdecl
== NULL_TREE
18567 || (TREE_CODE (funcdecl
) == FUNCTION_DECL
18568 && DECL_BUILT_IN_CLASS (funcdecl
) != BUILT_IN_MD
)))
18569 ? N_("AltiVec argument passed to unprototyped function")
18573 /* For TARGET_SECURE_PLT 32-bit PIC code we can save PIC register
18574 setup by using __stack_chk_fail_local hidden function instead of
18575 calling __stack_chk_fail directly. Otherwise it is better to call
18576 __stack_chk_fail directly. */
18579 rs6000_stack_protect_fail (void)
18581 return (DEFAULT_ABI
== ABI_V4
&& TARGET_SECURE_PLT
&& flag_pic
)
18582 ? default_hidden_stack_protect_fail ()
18583 : default_external_stack_protect_fail ();
18586 #include "gt-rs6000.h"