1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
55 #include "tree-gimple.h"
57 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
#ifndef TARGET_NO_PROTOTYPE
#define TARGET_NO_PROTOTYPE 0
#endif

/* True iff N fits in a signed 5-bit AltiVec splat immediate.  */
#define EASY_VECTOR_15(n) ((n) >= -16 && (n) <= 15)
/* True iff N is even and N/2 fits in a splat immediate (so the constant
   can be materialized as a splat followed by an add of the result to
   itself).  */
#define EASY_VECTOR_15_ADD_SELF(n) ((n) >= 0x10 && (n) <= 0x1e \
					  && !((n) & 1))

#define min(A,B)	((A) < (B) ? (A) : (B))
#define max(A,B)	((A) > (B) ? (A) : (B))
71 /* Structure used to define the rs6000 stack */
72 typedef struct rs6000_stack
{
73 int first_gp_reg_save
; /* first callee saved GP register used */
74 int first_fp_reg_save
; /* first callee saved FP register used */
75 int first_altivec_reg_save
; /* first callee saved AltiVec register used */
76 int lr_save_p
; /* true if the link reg needs to be saved */
77 int cr_save_p
; /* true if the CR reg needs to be saved */
78 unsigned int vrsave_mask
; /* mask of vec registers to save */
79 int toc_save_p
; /* true if the TOC needs to be saved */
80 int push_p
; /* true if we need to allocate stack space */
81 int calls_p
; /* true if the function makes any calls */
82 int world_save_p
; /* true if we're saving *everything*:
83 r13-r31, cr, f14-f31, vrsave, v20-v31 */
84 enum rs6000_abi abi
; /* which ABI to use */
85 int gp_save_offset
; /* offset to save GP regs from initial SP */
86 int fp_save_offset
; /* offset to save FP regs from initial SP */
87 int altivec_save_offset
; /* offset to save AltiVec regs from initial SP */
88 int lr_save_offset
; /* offset to save LR from initial SP */
89 int cr_save_offset
; /* offset to save CR from initial SP */
90 int vrsave_save_offset
; /* offset to save VRSAVE from initial SP */
91 int spe_gp_save_offset
; /* offset to save spe 64-bit gprs */
92 int toc_save_offset
; /* offset to save the TOC pointer */
93 int varargs_save_offset
; /* offset to save the varargs registers */
94 int ehrd_offset
; /* offset to EH return data */
95 int reg_size
; /* register size (4 or 8) */
96 int varargs_size
; /* size to hold V.4 args passed in regs */
97 HOST_WIDE_INT vars_size
; /* variable save area size */
98 int parm_size
; /* outgoing parameter size */
99 int save_size
; /* save area size */
100 int fixed_size
; /* fixed size of stack frame */
101 int gp_size
; /* size of saved GP registers */
102 int fp_size
; /* size of saved FP registers */
103 int altivec_size
; /* size of saved AltiVec registers */
104 int cr_size
; /* size to hold CR if not in save_size */
105 int lr_size
; /* size to hold LR if not in save_size */
106 int vrsave_size
; /* size to hold VRSAVE if not in save_size */
107 int altivec_padding_size
; /* size of altivec alignment padding if
109 int spe_gp_size
; /* size of 64-bit GPR save size for SPE */
110 int spe_padding_size
;
111 int toc_size
; /* size to hold TOC if not in save_size */
112 HOST_WIDE_INT total_size
; /* total bytes allocated for stack */
113 int spe_64bit_regs_used
;
116 /* Target cpu type */
118 enum processor_type rs6000_cpu
;
119 struct rs6000_cpu_select rs6000_select
[3] =
121 /* switch name, tune arch */
122 { (const char *)0, "--with-cpu=", 1, 1 },
123 { (const char *)0, "-mcpu=", 1, 1 },
124 { (const char *)0, "-mtune=", 1, 0 },
127 /* Always emit branch hint bits. */
128 static GTY(()) bool rs6000_always_hint
;
130 /* Schedule instructions for group formation. */
131 static GTY(()) bool rs6000_sched_groups
;
133 /* Support adjust_priority scheduler hook
134 and -mprioritize-restricted-insns= option. */
135 const char *rs6000_sched_restricted_insns_priority_str
;
136 int rs6000_sched_restricted_insns_priority
;
138 /* Support for -msched-costly-dep option. */
139 const char *rs6000_sched_costly_dep_str
;
140 enum rs6000_dependence_cost rs6000_sched_costly_dep
;
142 /* Support for -minsert-sched-nops option. */
143 const char *rs6000_sched_insert_nops_str
;
144 enum rs6000_nop_insertion rs6000_sched_insert_nops
;
146 /* Support targetm.vectorize.builtin_mask_for_load. */
147 tree altivec_builtin_mask_for_load
;
148 /* Support targetm.vectorize.builtin_mask_for_store. */
149 tree altivec_builtin_mask_for_store
;
151 /* Size of long double */
152 const char *rs6000_long_double_size_string
;
153 int rs6000_long_double_type_size
;
155 /* Whether -mabi=altivec has appeared */
156 int rs6000_altivec_abi
;
158 /* Whether VRSAVE instructions should be generated. */
159 int rs6000_altivec_vrsave
;
161 /* String from -mvrsave= option. */
162 const char *rs6000_altivec_vrsave_string
;
164 /* Nonzero if we want SPE ABI extensions. */
167 /* Whether isel instructions should be generated. */
170 /* Whether SPE simd instructions should be generated. */
173 /* Nonzero if floating point operations are done in the GPRs. */
174 int rs6000_float_gprs
= 0;
176 /* String from -mfloat-gprs=. */
177 const char *rs6000_float_gprs_string
;
179 /* String from -misel=. */
180 const char *rs6000_isel_string
;
182 /* String from -mspe=. */
183 const char *rs6000_spe_string
;
185 /* Set to nonzero once AIX common-mode calls have been defined. */
186 static GTY(()) int common_mode_defined
;
/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
190 rtx rs6000_compare_op0
, rs6000_compare_op1
;
191 int rs6000_compare_fp_p
;
193 /* Label number of label created for -mrelocatable, to call to so we can
194 get the address of the GOT section */
195 int rs6000_pic_labelno
;
198 /* Which abi to adhere to */
199 const char *rs6000_abi_name
;
201 /* Semantics of the small data area */
202 enum rs6000_sdata_type rs6000_sdata
= SDATA_DATA
;
204 /* Which small data model to use */
205 const char *rs6000_sdata_name
= (char *)0;
207 /* Counter for labels which are to be placed in .fixup. */
208 int fixuplabelno
= 0;
211 /* Bit size of immediate TLS offsets and string from which it is decoded. */
212 int rs6000_tls_size
= 32;
213 const char *rs6000_tls_size_string
;
215 /* ABI enumeration available for subtarget to use. */
216 enum rs6000_abi rs6000_current_abi
;
218 /* ABI string from -mabi= option. */
219 const char *rs6000_abi_string
;
221 /* Whether to use variant of AIX ABI for PowerPC64 Linux. */
225 const char *rs6000_debug_name
;
226 int rs6000_debug_stack
; /* debug stack applications */
227 int rs6000_debug_arg
; /* debug argument handling */
/* Value is TRUE if register/mode pair is acceptable.  */
230 bool rs6000_hard_regno_mode_ok_p
[NUM_MACHINE_MODES
][FIRST_PSEUDO_REGISTER
];
233 static GTY(()) tree opaque_V2SI_type_node
;
234 static GTY(()) tree opaque_V2SF_type_node
;
235 static GTY(()) tree opaque_p_V2SI_type_node
;
236 static GTY(()) tree V16QI_type_node
;
237 static GTY(()) tree V2SI_type_node
;
238 static GTY(()) tree V2SF_type_node
;
239 static GTY(()) tree V4HI_type_node
;
240 static GTY(()) tree V4SI_type_node
;
241 static GTY(()) tree V4SF_type_node
;
242 static GTY(()) tree V8HI_type_node
;
243 static GTY(()) tree unsigned_V16QI_type_node
;
244 static GTY(()) tree unsigned_V8HI_type_node
;
245 static GTY(()) tree unsigned_V4SI_type_node
;
246 static GTY(()) tree bool_char_type_node
; /* __bool char */
247 static GTY(()) tree bool_short_type_node
; /* __bool short */
248 static GTY(()) tree bool_int_type_node
; /* __bool int */
249 static GTY(()) tree pixel_type_node
; /* __pixel */
250 static GTY(()) tree bool_V16QI_type_node
; /* __vector __bool char */
251 static GTY(()) tree bool_V8HI_type_node
; /* __vector __bool short */
252 static GTY(()) tree bool_V4SI_type_node
; /* __vector __bool int */
253 static GTY(()) tree pixel_V8HI_type_node
; /* __vector __pixel */
255 int rs6000_warn_altivec_long
= 1; /* On by default. */
256 const char *rs6000_warn_altivec_long_switch
;
258 const char *rs6000_traceback_name
;
/* Requested traceback-table style; default means "pick per target".
   NOTE(review): the surrounding enum lines were reconstructed from the
   lone `traceback_default = 0,` enumerator visible here.  */
static enum {
  traceback_default = 0,
  traceback_none,
  traceback_part,
  traceback_full
} rs6000_traceback;
266 /* Flag to say the TOC is initialized */
268 char toc_label_name
[10];
270 /* Alias set for saves and restores from the rs6000 stack. */
271 static GTY(()) int rs6000_sr_alias_set
;
273 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
274 The only place that looks at this is rs6000_set_default_type_attributes;
275 everywhere else should rely on the presence or absence of a longcall
276 attribute on the function declaration. */
277 int rs6000_default_long_calls
;
278 const char *rs6000_longcall_switch
;
280 /* Control alignment for fields within structures. */
281 /* String from -malign-XXXXX. */
282 const char *rs6000_alignment_string
;
283 int rs6000_alignment_flags
;
285 struct builtin_description
287 /* mask is not const because we're going to alter it below. This
288 nonsense will go away when we rewrite the -march infrastructure
289 to give us more target flag bits. */
291 const enum insn_code icode
;
292 const char *const name
;
293 const enum rs6000_builtins code
;
296 /* Target cpu costs. */
struct processor_costs {
  const int mulsi;	  /* cost of SImode multiplication.  */
  const int mulsi_const;  /* cost of SImode multiplication by constant.  */
  const int mulsi_const9; /* cost of SImode mult by short constant.  */
  const int muldi;	  /* cost of DImode multiplication.  */
  const int divsi;	  /* cost of SImode division.  */
  const int divdi;	  /* cost of DImode division.  */
  const int fp;		  /* cost of simple SFmode and DFmode insns.  */
  const int dmul;	  /* cost of DFmode multiplication (and fmadd).  */
  const int sdiv;	  /* cost of SFmode division (fdivs).  */
  const int ddiv;	  /* cost of DFmode division (fdiv).  */
};
311 const struct processor_costs
*rs6000_cost
;
313 /* Processor costs (relative to an add) */
315 /* Instruction size costs on 32bit processors. */
317 struct processor_costs size32_cost
= {
318 COSTS_N_INSNS (1), /* mulsi */
319 COSTS_N_INSNS (1), /* mulsi_const */
320 COSTS_N_INSNS (1), /* mulsi_const9 */
321 COSTS_N_INSNS (1), /* muldi */
322 COSTS_N_INSNS (1), /* divsi */
323 COSTS_N_INSNS (1), /* divdi */
324 COSTS_N_INSNS (1), /* fp */
325 COSTS_N_INSNS (1), /* dmul */
326 COSTS_N_INSNS (1), /* sdiv */
327 COSTS_N_INSNS (1), /* ddiv */
330 /* Instruction size costs on 64bit processors. */
332 struct processor_costs size64_cost
= {
333 COSTS_N_INSNS (1), /* mulsi */
334 COSTS_N_INSNS (1), /* mulsi_const */
335 COSTS_N_INSNS (1), /* mulsi_const9 */
336 COSTS_N_INSNS (1), /* muldi */
337 COSTS_N_INSNS (1), /* divsi */
338 COSTS_N_INSNS (1), /* divdi */
339 COSTS_N_INSNS (1), /* fp */
340 COSTS_N_INSNS (1), /* dmul */
341 COSTS_N_INSNS (1), /* sdiv */
342 COSTS_N_INSNS (1), /* ddiv */
345 /* Instruction costs on RIOS1 processors. */
347 struct processor_costs rios1_cost
= {
348 COSTS_N_INSNS (5), /* mulsi */
349 COSTS_N_INSNS (4), /* mulsi_const */
350 COSTS_N_INSNS (3), /* mulsi_const9 */
351 COSTS_N_INSNS (5), /* muldi */
352 COSTS_N_INSNS (19), /* divsi */
353 COSTS_N_INSNS (19), /* divdi */
354 COSTS_N_INSNS (2), /* fp */
355 COSTS_N_INSNS (2), /* dmul */
356 COSTS_N_INSNS (19), /* sdiv */
357 COSTS_N_INSNS (19), /* ddiv */
360 /* Instruction costs on RIOS2 processors. */
362 struct processor_costs rios2_cost
= {
363 COSTS_N_INSNS (2), /* mulsi */
364 COSTS_N_INSNS (2), /* mulsi_const */
365 COSTS_N_INSNS (2), /* mulsi_const9 */
366 COSTS_N_INSNS (2), /* muldi */
367 COSTS_N_INSNS (13), /* divsi */
368 COSTS_N_INSNS (13), /* divdi */
369 COSTS_N_INSNS (2), /* fp */
370 COSTS_N_INSNS (2), /* dmul */
371 COSTS_N_INSNS (17), /* sdiv */
372 COSTS_N_INSNS (17), /* ddiv */
375 /* Instruction costs on RS64A processors. */
377 struct processor_costs rs64a_cost
= {
378 COSTS_N_INSNS (20), /* mulsi */
379 COSTS_N_INSNS (12), /* mulsi_const */
380 COSTS_N_INSNS (8), /* mulsi_const9 */
381 COSTS_N_INSNS (34), /* muldi */
382 COSTS_N_INSNS (65), /* divsi */
383 COSTS_N_INSNS (67), /* divdi */
384 COSTS_N_INSNS (4), /* fp */
385 COSTS_N_INSNS (4), /* dmul */
386 COSTS_N_INSNS (31), /* sdiv */
387 COSTS_N_INSNS (31), /* ddiv */
390 /* Instruction costs on MPCCORE processors. */
392 struct processor_costs mpccore_cost
= {
393 COSTS_N_INSNS (2), /* mulsi */
394 COSTS_N_INSNS (2), /* mulsi_const */
395 COSTS_N_INSNS (2), /* mulsi_const9 */
396 COSTS_N_INSNS (2), /* muldi */
397 COSTS_N_INSNS (6), /* divsi */
398 COSTS_N_INSNS (6), /* divdi */
399 COSTS_N_INSNS (4), /* fp */
400 COSTS_N_INSNS (5), /* dmul */
401 COSTS_N_INSNS (10), /* sdiv */
402 COSTS_N_INSNS (17), /* ddiv */
405 /* Instruction costs on PPC403 processors. */
407 struct processor_costs ppc403_cost
= {
408 COSTS_N_INSNS (4), /* mulsi */
409 COSTS_N_INSNS (4), /* mulsi_const */
410 COSTS_N_INSNS (4), /* mulsi_const9 */
411 COSTS_N_INSNS (4), /* muldi */
412 COSTS_N_INSNS (33), /* divsi */
413 COSTS_N_INSNS (33), /* divdi */
414 COSTS_N_INSNS (11), /* fp */
415 COSTS_N_INSNS (11), /* dmul */
416 COSTS_N_INSNS (11), /* sdiv */
417 COSTS_N_INSNS (11), /* ddiv */
420 /* Instruction costs on PPC405 processors. */
422 struct processor_costs ppc405_cost
= {
423 COSTS_N_INSNS (5), /* mulsi */
424 COSTS_N_INSNS (4), /* mulsi_const */
425 COSTS_N_INSNS (3), /* mulsi_const9 */
426 COSTS_N_INSNS (5), /* muldi */
427 COSTS_N_INSNS (35), /* divsi */
428 COSTS_N_INSNS (35), /* divdi */
429 COSTS_N_INSNS (11), /* fp */
430 COSTS_N_INSNS (11), /* dmul */
431 COSTS_N_INSNS (11), /* sdiv */
432 COSTS_N_INSNS (11), /* ddiv */
435 /* Instruction costs on PPC440 processors. */
437 struct processor_costs ppc440_cost
= {
438 COSTS_N_INSNS (3), /* mulsi */
439 COSTS_N_INSNS (2), /* mulsi_const */
440 COSTS_N_INSNS (2), /* mulsi_const9 */
441 COSTS_N_INSNS (3), /* muldi */
442 COSTS_N_INSNS (34), /* divsi */
443 COSTS_N_INSNS (34), /* divdi */
444 COSTS_N_INSNS (5), /* fp */
445 COSTS_N_INSNS (5), /* dmul */
446 COSTS_N_INSNS (19), /* sdiv */
447 COSTS_N_INSNS (33), /* ddiv */
450 /* Instruction costs on PPC601 processors. */
452 struct processor_costs ppc601_cost
= {
453 COSTS_N_INSNS (5), /* mulsi */
454 COSTS_N_INSNS (5), /* mulsi_const */
455 COSTS_N_INSNS (5), /* mulsi_const9 */
456 COSTS_N_INSNS (5), /* muldi */
457 COSTS_N_INSNS (36), /* divsi */
458 COSTS_N_INSNS (36), /* divdi */
459 COSTS_N_INSNS (4), /* fp */
460 COSTS_N_INSNS (5), /* dmul */
461 COSTS_N_INSNS (17), /* sdiv */
462 COSTS_N_INSNS (31), /* ddiv */
465 /* Instruction costs on PPC603 processors. */
467 struct processor_costs ppc603_cost
= {
468 COSTS_N_INSNS (5), /* mulsi */
469 COSTS_N_INSNS (3), /* mulsi_const */
470 COSTS_N_INSNS (2), /* mulsi_const9 */
471 COSTS_N_INSNS (5), /* muldi */
472 COSTS_N_INSNS (37), /* divsi */
473 COSTS_N_INSNS (37), /* divdi */
474 COSTS_N_INSNS (3), /* fp */
475 COSTS_N_INSNS (4), /* dmul */
476 COSTS_N_INSNS (18), /* sdiv */
477 COSTS_N_INSNS (33), /* ddiv */
480 /* Instruction costs on PPC604 processors. */
482 struct processor_costs ppc604_cost
= {
483 COSTS_N_INSNS (4), /* mulsi */
484 COSTS_N_INSNS (4), /* mulsi_const */
485 COSTS_N_INSNS (4), /* mulsi_const9 */
486 COSTS_N_INSNS (4), /* muldi */
487 COSTS_N_INSNS (20), /* divsi */
488 COSTS_N_INSNS (20), /* divdi */
489 COSTS_N_INSNS (3), /* fp */
490 COSTS_N_INSNS (3), /* dmul */
491 COSTS_N_INSNS (18), /* sdiv */
492 COSTS_N_INSNS (32), /* ddiv */
495 /* Instruction costs on PPC604e processors. */
497 struct processor_costs ppc604e_cost
= {
498 COSTS_N_INSNS (2), /* mulsi */
499 COSTS_N_INSNS (2), /* mulsi_const */
500 COSTS_N_INSNS (2), /* mulsi_const9 */
501 COSTS_N_INSNS (2), /* muldi */
502 COSTS_N_INSNS (20), /* divsi */
503 COSTS_N_INSNS (20), /* divdi */
504 COSTS_N_INSNS (3), /* fp */
505 COSTS_N_INSNS (3), /* dmul */
506 COSTS_N_INSNS (18), /* sdiv */
507 COSTS_N_INSNS (32), /* ddiv */
510 /* Instruction costs on PPC620 processors. */
512 struct processor_costs ppc620_cost
= {
513 COSTS_N_INSNS (5), /* mulsi */
514 COSTS_N_INSNS (4), /* mulsi_const */
515 COSTS_N_INSNS (3), /* mulsi_const9 */
516 COSTS_N_INSNS (7), /* muldi */
517 COSTS_N_INSNS (21), /* divsi */
518 COSTS_N_INSNS (37), /* divdi */
519 COSTS_N_INSNS (3), /* fp */
520 COSTS_N_INSNS (3), /* dmul */
521 COSTS_N_INSNS (18), /* sdiv */
522 COSTS_N_INSNS (32), /* ddiv */
525 /* Instruction costs on PPC630 processors. */
527 struct processor_costs ppc630_cost
= {
528 COSTS_N_INSNS (5), /* mulsi */
529 COSTS_N_INSNS (4), /* mulsi_const */
530 COSTS_N_INSNS (3), /* mulsi_const9 */
531 COSTS_N_INSNS (7), /* muldi */
532 COSTS_N_INSNS (21), /* divsi */
533 COSTS_N_INSNS (37), /* divdi */
534 COSTS_N_INSNS (3), /* fp */
535 COSTS_N_INSNS (3), /* dmul */
536 COSTS_N_INSNS (17), /* sdiv */
537 COSTS_N_INSNS (21), /* ddiv */
540 /* Instruction costs on PPC750 and PPC7400 processors. */
542 struct processor_costs ppc750_cost
= {
543 COSTS_N_INSNS (5), /* mulsi */
544 COSTS_N_INSNS (3), /* mulsi_const */
545 COSTS_N_INSNS (2), /* mulsi_const9 */
546 COSTS_N_INSNS (5), /* muldi */
547 COSTS_N_INSNS (17), /* divsi */
548 COSTS_N_INSNS (17), /* divdi */
549 COSTS_N_INSNS (3), /* fp */
550 COSTS_N_INSNS (3), /* dmul */
551 COSTS_N_INSNS (17), /* sdiv */
552 COSTS_N_INSNS (31), /* ddiv */
555 /* Instruction costs on PPC7450 processors. */
557 struct processor_costs ppc7450_cost
= {
558 COSTS_N_INSNS (4), /* mulsi */
559 COSTS_N_INSNS (3), /* mulsi_const */
560 COSTS_N_INSNS (3), /* mulsi_const9 */
561 COSTS_N_INSNS (4), /* muldi */
562 COSTS_N_INSNS (23), /* divsi */
563 COSTS_N_INSNS (23), /* divdi */
564 COSTS_N_INSNS (5), /* fp */
565 COSTS_N_INSNS (5), /* dmul */
566 COSTS_N_INSNS (21), /* sdiv */
567 COSTS_N_INSNS (35), /* ddiv */
570 /* Instruction costs on PPC8540 processors. */
572 struct processor_costs ppc8540_cost
= {
573 COSTS_N_INSNS (4), /* mulsi */
574 COSTS_N_INSNS (4), /* mulsi_const */
575 COSTS_N_INSNS (4), /* mulsi_const9 */
576 COSTS_N_INSNS (4), /* muldi */
577 COSTS_N_INSNS (19), /* divsi */
578 COSTS_N_INSNS (19), /* divdi */
579 COSTS_N_INSNS (4), /* fp */
580 COSTS_N_INSNS (4), /* dmul */
581 COSTS_N_INSNS (29), /* sdiv */
582 COSTS_N_INSNS (29), /* ddiv */
585 /* Instruction costs on POWER4 and POWER5 processors. */
587 struct processor_costs power4_cost
= {
588 COSTS_N_INSNS (3), /* mulsi */
589 COSTS_N_INSNS (2), /* mulsi_const */
590 COSTS_N_INSNS (2), /* mulsi_const9 */
591 COSTS_N_INSNS (4), /* muldi */
592 COSTS_N_INSNS (18), /* divsi */
593 COSTS_N_INSNS (34), /* divdi */
594 COSTS_N_INSNS (3), /* fp */
595 COSTS_N_INSNS (3), /* dmul */
596 COSTS_N_INSNS (17), /* sdiv */
597 COSTS_N_INSNS (17), /* ddiv */
601 static bool rs6000_function_ok_for_sibcall (tree
, tree
);
602 static int num_insns_constant_wide (HOST_WIDE_INT
);
603 static void validate_condition_mode (enum rtx_code
, enum machine_mode
);
604 static rtx
rs6000_generate_compare (enum rtx_code
);
605 static void rs6000_maybe_dead (rtx
);
606 static void rs6000_emit_stack_tie (void);
607 static void rs6000_frame_related (rtx
, rtx
, HOST_WIDE_INT
, rtx
, rtx
);
608 static rtx
spe_synthesize_frame_save (rtx
);
609 static bool spe_func_has_64bit_regs_p (void);
610 static void emit_frame_save (rtx
, rtx
, enum machine_mode
, unsigned int,
612 static rtx
gen_frame_mem_offset (enum machine_mode
, rtx
, int);
613 static void rs6000_emit_allocate_stack (HOST_WIDE_INT
, int);
614 static unsigned rs6000_hash_constant (rtx
);
615 static unsigned toc_hash_function (const void *);
616 static int toc_hash_eq (const void *, const void *);
617 static int constant_pool_expr_1 (rtx
, int *, int *);
618 static bool constant_pool_expr_p (rtx
);
619 static bool toc_relative_expr_p (rtx
);
620 static bool legitimate_small_data_p (enum machine_mode
, rtx
);
621 static bool legitimate_indexed_address_p (rtx
, int);
622 static bool legitimate_indirect_address_p (rtx
, int);
623 static bool macho_lo_sum_memory_operand (rtx x
, enum machine_mode mode
);
624 static bool legitimate_lo_sum_address_p (enum machine_mode
, rtx
, int);
625 static struct machine_function
* rs6000_init_machine_status (void);
626 static bool rs6000_assemble_integer (rtx
, unsigned int, int);
#ifdef HAVE_GAS_HIDDEN
static void rs6000_assemble_visibility (tree, int);
#endif
630 static int rs6000_ra_ever_killed (void);
631 static tree
rs6000_handle_longcall_attribute (tree
*, tree
, tree
, int, bool *);
632 static tree
rs6000_handle_altivec_attribute (tree
*, tree
, tree
, int, bool *);
633 static void rs6000_eliminate_indexed_memrefs (rtx operands
[2]);
634 static const char *rs6000_mangle_fundamental_type (tree
);
635 extern const struct attribute_spec rs6000_attribute_table
[];
636 static void rs6000_set_default_type_attributes (tree
);
637 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT
);
638 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT
);
639 static void rs6000_output_mi_thunk (FILE *, tree
, HOST_WIDE_INT
, HOST_WIDE_INT
,
641 static rtx
rs6000_emit_set_long_const (rtx
, HOST_WIDE_INT
, HOST_WIDE_INT
);
642 static bool rs6000_return_in_memory (tree
, tree
);
643 static void rs6000_file_start (void);
645 static unsigned int rs6000_elf_section_type_flags (tree
, const char *, int);
646 static void rs6000_elf_asm_out_constructor (rtx
, int);
647 static void rs6000_elf_asm_out_destructor (rtx
, int);
648 static void rs6000_elf_select_section (tree
, int, unsigned HOST_WIDE_INT
);
649 static void rs6000_elf_unique_section (tree
, int);
650 static void rs6000_elf_select_rtx_section (enum machine_mode
, rtx
,
651 unsigned HOST_WIDE_INT
);
652 static void rs6000_elf_encode_section_info (tree
, rtx
, int)
654 static bool rs6000_elf_in_small_data_p (tree
);
657 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
658 static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree
);
659 static void rs6000_xcoff_select_section (tree
, int, unsigned HOST_WIDE_INT
);
660 static void rs6000_xcoff_unique_section (tree
, int);
661 static void rs6000_xcoff_select_rtx_section (enum machine_mode
, rtx
,
662 unsigned HOST_WIDE_INT
);
663 static const char * rs6000_xcoff_strip_name_encoding (const char *);
664 static unsigned int rs6000_xcoff_section_type_flags (tree
, const char *, int);
665 static void rs6000_xcoff_file_start (void);
666 static void rs6000_xcoff_file_end (void);
669 static bool rs6000_binds_local_p (tree
);
671 static int rs6000_variable_issue (FILE *, int, rtx
, int);
672 static bool rs6000_rtx_costs (rtx
, int, int, int *);
673 static int rs6000_adjust_cost (rtx
, rtx
, rtx
, int);
674 static bool is_microcoded_insn (rtx
);
675 static int is_dispatch_slot_restricted (rtx
);
676 static bool is_cracked_insn (rtx
);
677 static bool is_branch_slot_insn (rtx
);
678 static int rs6000_adjust_priority (rtx
, int);
679 static int rs6000_issue_rate (void);
680 static bool rs6000_is_costly_dependence (rtx
, rtx
, rtx
, int, int);
681 static rtx
get_next_active_insn (rtx
, rtx
);
682 static bool insn_terminates_group_p (rtx
, enum group_termination
);
683 static bool is_costly_group (rtx
*, rtx
);
684 static int force_new_group (int, FILE *, rtx
*, rtx
, bool *, int, int *);
685 static int redefine_groups (FILE *, int, rtx
, rtx
);
686 static int pad_groups (FILE *, int, rtx
, rtx
);
687 static void rs6000_sched_finish (FILE *, int);
688 static int rs6000_use_sched_lookahead (void);
689 static tree
rs6000_builtin_mask_for_load (void);
690 static tree
rs6000_builtin_mask_for_store (void);
692 static void rs6000_init_builtins (void);
693 static rtx
rs6000_expand_unop_builtin (enum insn_code
, tree
, rtx
);
694 static rtx
rs6000_expand_binop_builtin (enum insn_code
, tree
, rtx
);
695 static rtx
rs6000_expand_ternop_builtin (enum insn_code
, tree
, rtx
);
696 static rtx
rs6000_expand_builtin (tree
, rtx
, rtx
, enum machine_mode
, int);
697 static void altivec_init_builtins (void);
698 static void rs6000_common_init_builtins (void);
699 static void rs6000_init_libfuncs (void);
701 static void enable_mask_for_builtins (struct builtin_description
*, int,
702 enum rs6000_builtins
,
703 enum rs6000_builtins
);
704 static tree
build_opaque_vector_type (tree
, int);
705 static void spe_init_builtins (void);
706 static rtx
spe_expand_builtin (tree
, rtx
, bool *);
707 static rtx
spe_expand_stv_builtin (enum insn_code
, tree
);
708 static rtx
spe_expand_predicate_builtin (enum insn_code
, tree
, rtx
);
709 static rtx
spe_expand_evsel_builtin (enum insn_code
, tree
, rtx
);
710 static int rs6000_emit_int_cmove (rtx
, rtx
, rtx
, rtx
);
711 static rs6000_stack_t
*rs6000_stack_info (void);
712 static void debug_stack_info (rs6000_stack_t
*);
714 static rtx
altivec_expand_builtin (tree
, rtx
, bool *);
715 static rtx
altivec_expand_ld_builtin (tree
, rtx
, bool *);
716 static rtx
altivec_expand_st_builtin (tree
, rtx
, bool *);
717 static rtx
altivec_expand_dst_builtin (tree
, rtx
, bool *);
718 static rtx
altivec_expand_abs_builtin (enum insn_code
, tree
, rtx
);
719 static rtx
altivec_expand_predicate_builtin (enum insn_code
,
720 const char *, tree
, rtx
);
721 static rtx
altivec_expand_lv_builtin (enum insn_code
, tree
, rtx
);
722 static rtx
altivec_expand_stv_builtin (enum insn_code
, tree
);
723 static void rs6000_parse_abi_options (void);
724 static void rs6000_parse_alignment_option (void);
725 static void rs6000_parse_tls_size_option (void);
726 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
727 static int first_altivec_reg_to_save (void);
728 static unsigned int compute_vrsave_mask (void);
729 static void compute_save_world_info(rs6000_stack_t
*info_ptr
);
730 static void is_altivec_return_reg (rtx
, void *);
731 static rtx
generate_set_vrsave (rtx
, rs6000_stack_t
*, int);
732 int easy_vector_constant (rtx
, enum machine_mode
);
733 static int easy_vector_same (rtx
, enum machine_mode
);
734 static int easy_vector_splat_const (int, enum machine_mode
);
735 static bool is_ev64_opaque_type (tree
);
736 static rtx
rs6000_dwarf_register_span (rtx
);
737 static rtx
rs6000_legitimize_tls_address (rtx
, enum tls_model
);
738 static rtx
rs6000_tls_get_addr (void);
739 static rtx
rs6000_got_sym (void);
740 static inline int rs6000_tls_symbol_ref_1 (rtx
*, void *);
741 static const char *rs6000_get_some_local_dynamic_name (void);
742 static int rs6000_get_some_local_dynamic_name_1 (rtx
*, void *);
743 static rtx
rs6000_complex_function_value (enum machine_mode
);
744 static rtx
rs6000_spe_function_arg (CUMULATIVE_ARGS
*,
745 enum machine_mode
, tree
);
746 static rtx
rs6000_mixed_function_arg (enum machine_mode
, tree
, int);
747 static void rs6000_move_block_from_reg (int regno
, rtx x
, int nregs
);
748 static void setup_incoming_varargs (CUMULATIVE_ARGS
*,
749 enum machine_mode
, tree
,
751 static bool rs6000_pass_by_reference (CUMULATIVE_ARGS
*, enum machine_mode
,
754 static void macho_branch_islands (void);
755 static void add_compiler_branch_island (tree
, tree
, int);
756 static int no_previous_def (tree function_name
);
757 static tree
get_prev_label (tree function_name
);
760 static tree
rs6000_build_builtin_va_list (void);
761 static tree
rs6000_gimplify_va_arg (tree
, tree
, tree
*, tree
*);
762 static bool rs6000_must_pass_in_stack (enum machine_mode
, tree
);
763 static bool rs6000_vector_mode_supported_p (enum machine_mode
);
765 static enum machine_mode
rs6000_eh_return_filter_mode (void);
767 /* Hash table stuff for keeping track of TOC entries. */
769 struct toc_hash_struct
GTY(())
771 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
772 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
774 enum machine_mode key_mode
;
778 static GTY ((param_is (struct toc_hash_struct
))) htab_t toc_hash_table
;
780 /* Default register names. */
/* Default register names: GPRs, FPRs, mq/lr/ctr/ap, CRs, xer, AltiVec,
   vrsave/vscr, and SPE registers, in hard-register-number order.
   NOTE(review): the "xer" through "spefscr" entries were reconstructed;
   verify against FIRST_PSEUDO_REGISTER for this port.  */
char rs6000_reg_names[][8] =
{
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9", "10", "11", "12", "13", "14", "15",
     "16", "17", "18", "19", "20", "21", "22", "23",
     "24", "25", "26", "27", "28", "29", "30", "31",
     "mq", "lr", "ctr","ap",
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "xer",
      /* AltiVec registers.  */
      "0",  "1",  "2",  "3",  "4",  "5",  "6",  "7",
      "8",  "9",  "10", "11", "12", "13", "14", "15",
      "16", "17", "18", "19", "20", "21", "22", "23",
      "24", "25", "26", "27", "28", "29", "30", "31",
      "vrsave", "vscr",
      /* SPE registers.  */
      "spe_acc", "spefscr"
};
#ifdef TARGET_REGNAMES
/* Alternate register names used when -mregnames is given: same order as
   rs6000_reg_names, but with assembler-style %r/%f/%cr/%v prefixes.
   NOTE(review): the "xer" through "spefscr" tail entries were
   reconstructed; verify they mirror rs6000_reg_names.  */
static const char alt_reg_names[][8] =
{
   "%r0",   "%r1",  "%r2",  "%r3",  "%r4",  "%r5",  "%r6",  "%r7",
   "%r8",   "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
  "%r16",  "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
  "%r24",  "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
   "%f0",   "%f1",  "%f2",  "%f3",  "%f4",  "%f5",  "%f6",  "%f7",
   "%f8",   "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
  "%f16",  "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
  "%f24",  "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
    "mq",    "lr",  "ctr",   "ap",
  "%cr0",  "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
   "xer",
   /* AltiVec registers.  */
   "%v0",  "%v1",  "%v2",  "%v3",  "%v4",  "%v5",  "%v6",  "%v7",
   "%v8",  "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
  "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
  "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
  "vrsave", "vscr",
  /* SPE registers.  */
  "spe_acc", "spefscr"
};
#endif
#ifndef MASK_STRICT_ALIGN
#define MASK_STRICT_ALIGN 0
#endif
#ifndef TARGET_PROFILE_KERNEL
#define TARGET_PROFILE_KERNEL 0
#endif
836 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
837 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
839 /* Return 1 for a symbol ref for a thread-local storage symbol. */
840 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
841 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
843 /* Initialize the GCC target structure. */
844 #undef TARGET_ATTRIBUTE_TABLE
845 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
846 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
847 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
849 #undef TARGET_ASM_ALIGNED_DI_OP
850 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
852 /* Default unaligned ops are only provided for ELF. Find the ops needed
853 for non-ELF systems. */
#ifndef OBJECT_FORMAT_ELF
/* For XCOFF.  rs6000_assemble_integer will handle unaligned DIs on
   32-bit targets.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
#else
/* For Darwin.  */
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#endif
877 /* This hook deals with fixups for relocatable code and DI-mode objects
879 #undef TARGET_ASM_INTEGER
880 #define TARGET_ASM_INTEGER rs6000_assemble_integer
#ifdef HAVE_GAS_HIDDEN
#undef TARGET_ASM_ASSEMBLE_VISIBILITY
#define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
#endif
887 #undef TARGET_HAVE_TLS
888 #define TARGET_HAVE_TLS HAVE_AS_TLS
890 #undef TARGET_CANNOT_FORCE_CONST_MEM
891 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
893 #undef TARGET_ASM_FUNCTION_PROLOGUE
894 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
895 #undef TARGET_ASM_FUNCTION_EPILOGUE
896 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
898 #undef TARGET_SCHED_VARIABLE_ISSUE
899 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
901 #undef TARGET_SCHED_ISSUE_RATE
902 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
903 #undef TARGET_SCHED_ADJUST_COST
904 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
905 #undef TARGET_SCHED_ADJUST_PRIORITY
906 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
907 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
908 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
909 #undef TARGET_SCHED_FINISH
910 #define TARGET_SCHED_FINISH rs6000_sched_finish
912 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
913 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
915 #undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
916 #define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
918 #undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_STORE
919 #define TARGET_VECTORIZE_BUILTIN_MASK_FOR_STORE rs6000_builtin_mask_for_store
921 #undef TARGET_INIT_BUILTINS
922 #define TARGET_INIT_BUILTINS rs6000_init_builtins
924 #undef TARGET_EXPAND_BUILTIN
925 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
927 #undef TARGET_MANGLE_FUNDAMENTAL_TYPE
928 #define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
930 #undef TARGET_INIT_LIBFUNCS
931 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
934 #undef TARGET_BINDS_LOCAL_P
935 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
938 #undef TARGET_ASM_OUTPUT_MI_THUNK
939 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
941 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
942 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
944 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
945 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
947 #undef TARGET_RTX_COSTS
948 #define TARGET_RTX_COSTS rs6000_rtx_costs
949 #undef TARGET_ADDRESS_COST
950 #define TARGET_ADDRESS_COST hook_int_rtx_0
952 #undef TARGET_VECTOR_OPAQUE_P
953 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
955 #undef TARGET_DWARF_REGISTER_SPAN
956 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
958 /* On rs6000, function arguments are promoted, as are function return
960 #undef TARGET_PROMOTE_FUNCTION_ARGS
961 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
962 #undef TARGET_PROMOTE_FUNCTION_RETURN
963 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
965 #undef TARGET_RETURN_IN_MEMORY
966 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
968 #undef TARGET_SETUP_INCOMING_VARARGS
969 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
971 /* Always strict argument naming on rs6000. */
972 #undef TARGET_STRICT_ARGUMENT_NAMING
973 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
974 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
975 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
976 #undef TARGET_SPLIT_COMPLEX_ARG
977 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
978 #undef TARGET_MUST_PASS_IN_STACK
979 #define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
980 #undef TARGET_PASS_BY_REFERENCE
981 #define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
983 #undef TARGET_BUILD_BUILTIN_VA_LIST
984 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
986 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
987 #define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
989 #undef TARGET_EH_RETURN_FILTER_MODE
990 #define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
992 #undef TARGET_VECTOR_MODE_SUPPORTED_P
993 #define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
995 struct gcc_target targetm
= TARGET_INITIALIZER
;
998 /* Value is 1 if hard register REGNO can hold a value of machine-mode
1001 rs6000_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
1003 /* The GPRs can hold any mode, but values bigger than one register
1004 cannot go past R31. */
1005 if (INT_REGNO_P (regno
))
1006 return INT_REGNO_P (regno
+ HARD_REGNO_NREGS (regno
, mode
) - 1);
1008 /* The float registers can only hold floating modes and DImode. */
1009 if (FP_REGNO_P (regno
))
1011 (GET_MODE_CLASS (mode
) == MODE_FLOAT
1012 && FP_REGNO_P (regno
+ HARD_REGNO_NREGS (regno
, mode
) - 1))
1013 || (GET_MODE_CLASS (mode
) == MODE_INT
1014 && GET_MODE_SIZE (mode
) == UNITS_PER_FP_WORD
);
1016 /* The CR register can only hold CC modes. */
1017 if (CR_REGNO_P (regno
))
1018 return GET_MODE_CLASS (mode
) == MODE_CC
;
1020 if (XER_REGNO_P (regno
))
1021 return mode
== PSImode
;
1023 /* AltiVec only in AldyVec registers. */
1024 if (ALTIVEC_REGNO_P (regno
))
1025 return ALTIVEC_VECTOR_MODE (mode
);
1027 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1028 if (SPE_SIMD_REGNO_P (regno
) && TARGET_SPE
&& SPE_VECTOR_MODE (mode
))
1031 /* We cannot put TImode anywhere except general register and it must be
1032 able to fit within the register set. */
1034 return GET_MODE_SIZE (mode
) <= UNITS_PER_WORD
;
1037 /* Initialize rs6000_hard_regno_mode_ok_p table. */
1039 rs6000_init_hard_regno_mode_ok (void)
1043 for (r
= 0; r
< FIRST_PSEUDO_REGISTER
; ++r
)
1044 for (m
= 0; m
< NUM_MACHINE_MODES
; ++m
)
1045 if (rs6000_hard_regno_mode_ok (r
, m
))
1046 rs6000_hard_regno_mode_ok_p
[m
][r
] = true;
1049 /* If not otherwise specified by a target, make 'long double' equivalent to
1052 #ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1053 #define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1056 /* Override command line options. Mostly we process the processor
1057 type and sometimes adjust other TARGET_ options. */
1060 rs6000_override_options (const char *default_cpu
)
1063 struct rs6000_cpu_select
*ptr
;
1066 /* Simplifications for entries below. */
1069 POWERPC_BASE_MASK
= MASK_POWERPC
| MASK_NEW_MNEMONICS
,
1070 POWERPC_7400_MASK
= POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_ALTIVEC
1073 /* This table occasionally claims that a processor does not support
1074 a particular feature even though it does, but the feature is slower
1075 than the alternative. Thus, it shouldn't be relied on as a
1076 complete description of the processor's support.
1078 Please keep this list in order, and don't forget to update the
1079 documentation in invoke.texi when adding a new processor or
1083 const char *const name
; /* Canonical processor name. */
1084 const enum processor_type processor
; /* Processor type enum value. */
1085 const int target_enable
; /* Target flags to enable. */
1086 } const processor_target_table
[]
1087 = {{"401", PROCESSOR_PPC403
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1088 {"403", PROCESSOR_PPC403
,
1089 POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
| MASK_STRICT_ALIGN
},
1090 {"405", PROCESSOR_PPC405
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1091 {"405fp", PROCESSOR_PPC405
, POWERPC_BASE_MASK
},
1092 {"440", PROCESSOR_PPC440
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1093 {"440fp", PROCESSOR_PPC440
, POWERPC_BASE_MASK
},
1094 {"505", PROCESSOR_MPCCORE
, POWERPC_BASE_MASK
},
1095 {"601", PROCESSOR_PPC601
,
1096 MASK_POWER
| POWERPC_BASE_MASK
| MASK_MULTIPLE
| MASK_STRING
},
1097 {"602", PROCESSOR_PPC603
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1098 {"603", PROCESSOR_PPC603
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1099 {"603e", PROCESSOR_PPC603
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1100 {"604", PROCESSOR_PPC604
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1101 {"604e", PROCESSOR_PPC604e
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1102 {"620", PROCESSOR_PPC620
,
1103 POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_POWERPC64
},
1104 {"630", PROCESSOR_PPC630
,
1105 POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_POWERPC64
},
1106 {"740", PROCESSOR_PPC750
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1107 {"7400", PROCESSOR_PPC7400
, POWERPC_7400_MASK
},
1108 {"7450", PROCESSOR_PPC7450
, POWERPC_7400_MASK
},
1109 {"750", PROCESSOR_PPC750
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1110 {"801", PROCESSOR_MPCCORE
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1111 {"821", PROCESSOR_MPCCORE
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1112 {"823", PROCESSOR_MPCCORE
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1113 {"8540", PROCESSOR_PPC8540
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1114 {"860", PROCESSOR_MPCCORE
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1115 {"970", PROCESSOR_POWER4
,
1116 POWERPC_7400_MASK
| MASK_PPC_GPOPT
| MASK_MFCRF
| MASK_POWERPC64
},
1117 {"common", PROCESSOR_COMMON
, MASK_NEW_MNEMONICS
},
1118 {"ec603e", PROCESSOR_PPC603
, POWERPC_BASE_MASK
| MASK_SOFT_FLOAT
},
1119 {"G3", PROCESSOR_PPC750
, POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
},
1120 {"G4", PROCESSOR_PPC7450
, POWERPC_7400_MASK
},
1121 {"G5", PROCESSOR_POWER4
,
1122 POWERPC_7400_MASK
| MASK_PPC_GPOPT
| MASK_MFCRF
| MASK_POWERPC64
},
1123 {"power", PROCESSOR_POWER
, MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
},
1124 {"power2", PROCESSOR_POWER
,
1125 MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
},
1126 {"power3", PROCESSOR_PPC630
,
1127 POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_POWERPC64
},
1128 {"power4", PROCESSOR_POWER4
,
1129 POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_MFCRF
| MASK_POWERPC64
},
1130 {"power5", PROCESSOR_POWER5
,
1131 POWERPC_BASE_MASK
| MASK_PPC_GFXOPT
| MASK_MFCRF
| MASK_POWERPC64
},
1132 {"powerpc", PROCESSOR_POWERPC
, POWERPC_BASE_MASK
},
1133 {"powerpc64", PROCESSOR_POWERPC64
,
1134 POWERPC_BASE_MASK
| MASK_POWERPC64
},
1135 {"rios", PROCESSOR_RIOS1
, MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
},
1136 {"rios1", PROCESSOR_RIOS1
, MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
},
1137 {"rios2", PROCESSOR_RIOS2
,
1138 MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
},
1139 {"rsc", PROCESSOR_PPC601
, MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
},
1140 {"rsc1", PROCESSOR_PPC601
, MASK_POWER
| MASK_MULTIPLE
| MASK_STRING
},
1141 {"rs64a", PROCESSOR_RS64A
, POWERPC_BASE_MASK
| MASK_POWERPC64
},
1144 const size_t ptt_size
= ARRAY_SIZE (processor_target_table
);
1146 /* Some OSs don't support saving the high part of 64-bit registers on
1147 context switch. Other OSs don't support saving Altivec registers.
1148 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1149 settings; if the user wants either, the user must explicitly specify
1150 them and we won't interfere with the user's specification. */
1153 POWER_MASKS
= MASK_POWER
| MASK_POWER2
| MASK_MULTIPLE
| MASK_STRING
,
1154 POWERPC_MASKS
= (POWERPC_BASE_MASK
| MASK_PPC_GPOPT
1155 | MASK_PPC_GFXOPT
| MASK_POWERPC64
| MASK_ALTIVEC
1159 rs6000_init_hard_regno_mode_ok ();
1161 set_masks
= POWER_MASKS
| POWERPC_MASKS
| MASK_SOFT_FLOAT
;
1162 #ifdef OS_MISSING_POWERPC64
1163 if (OS_MISSING_POWERPC64
)
1164 set_masks
&= ~MASK_POWERPC64
;
1166 #ifdef OS_MISSING_ALTIVEC
1167 if (OS_MISSING_ALTIVEC
)
1168 set_masks
&= ~MASK_ALTIVEC
;
1171 /* Don't override these by the processor default if given explicitly. */
1172 set_masks
&= ~(target_flags_explicit
1173 & (MASK_MULTIPLE
| MASK_STRING
| MASK_SOFT_FLOAT
));
1175 /* Identify the processor type. */
1176 rs6000_select
[0].string
= default_cpu
;
1177 rs6000_cpu
= TARGET_POWERPC64
? PROCESSOR_DEFAULT64
: PROCESSOR_DEFAULT
;
1179 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
1181 ptr
= &rs6000_select
[i
];
1182 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
1184 for (j
= 0; j
< ptt_size
; j
++)
1185 if (! strcmp (ptr
->string
, processor_target_table
[j
].name
))
1187 if (ptr
->set_tune_p
)
1188 rs6000_cpu
= processor_target_table
[j
].processor
;
1190 if (ptr
->set_arch_p
)
1192 target_flags
&= ~set_masks
;
1193 target_flags
|= (processor_target_table
[j
].target_enable
1200 error ("bad value (%s) for %s switch", ptr
->string
, ptr
->name
);
1207 /* If we are optimizing big endian systems for space, use the load/store
1208 multiple and string instructions. */
1209 if (BYTES_BIG_ENDIAN
&& optimize_size
)
1210 target_flags
|= ~target_flags_explicit
& (MASK_MULTIPLE
| MASK_STRING
);
1212 /* Don't allow -mmultiple or -mstring on little endian systems
1213 unless the cpu is a 750, because the hardware doesn't support the
1214 instructions used in little endian mode, and causes an alignment
1215 trap. The 750 does not cause an alignment trap (except when the
1216 target is unaligned). */
1218 if (!BYTES_BIG_ENDIAN
&& rs6000_cpu
!= PROCESSOR_PPC750
)
1220 if (TARGET_MULTIPLE
)
1222 target_flags
&= ~MASK_MULTIPLE
;
1223 if ((target_flags_explicit
& MASK_MULTIPLE
) != 0)
1224 warning ("-mmultiple is not supported on little endian systems");
1229 target_flags
&= ~MASK_STRING
;
1230 if ((target_flags_explicit
& MASK_STRING
) != 0)
1231 warning ("-mstring is not supported on little endian systems");
1235 /* Set debug flags */
1236 if (rs6000_debug_name
)
1238 if (! strcmp (rs6000_debug_name
, "all"))
1239 rs6000_debug_stack
= rs6000_debug_arg
= 1;
1240 else if (! strcmp (rs6000_debug_name
, "stack"))
1241 rs6000_debug_stack
= 1;
1242 else if (! strcmp (rs6000_debug_name
, "arg"))
1243 rs6000_debug_arg
= 1;
1245 error ("unknown -mdebug-%s switch", rs6000_debug_name
);
1248 if (rs6000_traceback_name
)
1250 if (! strncmp (rs6000_traceback_name
, "full", 4))
1251 rs6000_traceback
= traceback_full
;
1252 else if (! strncmp (rs6000_traceback_name
, "part", 4))
1253 rs6000_traceback
= traceback_part
;
1254 else if (! strncmp (rs6000_traceback_name
, "no", 2))
1255 rs6000_traceback
= traceback_none
;
1257 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
1258 rs6000_traceback_name
);
1261 /* Set size of long double */
1262 rs6000_long_double_type_size
= RS6000_DEFAULT_LONG_DOUBLE_SIZE
;
1263 if (rs6000_long_double_size_string
)
1266 int size
= strtol (rs6000_long_double_size_string
, &tail
, 10);
1267 if (*tail
!= '\0' || (size
!= 64 && size
!= 128))
1268 error ("Unknown switch -mlong-double-%s",
1269 rs6000_long_double_size_string
);
1271 rs6000_long_double_type_size
= size
;
1274 /* Set Altivec ABI as default for powerpc64 linux. */
1275 if (TARGET_ELF
&& TARGET_64BIT
)
1277 rs6000_altivec_abi
= 1;
1278 rs6000_altivec_vrsave
= 1;
1281 /* Handle -mabi= options. */
1282 rs6000_parse_abi_options ();
1284 /* Handle -malign-XXXXX option. */
1285 rs6000_parse_alignment_option ();
1287 /* Handle generic -mFOO=YES/NO options. */
1288 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string
,
1289 &rs6000_altivec_vrsave
);
1290 rs6000_parse_yes_no_option ("isel", rs6000_isel_string
,
1292 rs6000_parse_yes_no_option ("spe", rs6000_spe_string
, &rs6000_spe
);
1293 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string
,
1294 &rs6000_float_gprs
);
1296 /* Handle -mtls-size option. */
1297 rs6000_parse_tls_size_option ();
1299 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1300 SUBTARGET_OVERRIDE_OPTIONS
;
1302 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1303 SUBSUBTARGET_OVERRIDE_OPTIONS
;
1309 error ("AltiVec and E500 instructions cannot coexist");
1311 /* The e500 does not have string instructions, and we set
1312 MASK_STRING above when optimizing for size. */
1313 if ((target_flags
& MASK_STRING
) != 0)
1314 target_flags
= target_flags
& ~MASK_STRING
;
1316 /* No SPE means 64-bit long doubles, even if an E500. */
1317 if (rs6000_spe_string
!= 0
1318 && !strcmp (rs6000_spe_string
, "no"))
1319 rs6000_long_double_type_size
= 64;
1321 else if (rs6000_select
[1].string
!= NULL
)
1323 /* For the powerpc-eabispe configuration, we set all these by
1324 default, so let's unset them if we manually set another
1325 CPU that is not the E500. */
1326 if (rs6000_abi_string
== 0)
1328 if (rs6000_spe_string
== 0)
1330 if (rs6000_float_gprs_string
== 0)
1331 rs6000_float_gprs
= 0;
1332 if (rs6000_isel_string
== 0)
1334 if (rs6000_long_double_size_string
== 0)
1335 rs6000_long_double_type_size
= RS6000_DEFAULT_LONG_DOUBLE_SIZE
;
1338 rs6000_always_hint
= (rs6000_cpu
!= PROCESSOR_POWER4
1339 && rs6000_cpu
!= PROCESSOR_POWER5
);
1340 rs6000_sched_groups
= (rs6000_cpu
== PROCESSOR_POWER4
1341 || rs6000_cpu
== PROCESSOR_POWER5
);
1343 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
1344 using TARGET_OPTIONS to handle a toggle switch, but we're out of
1345 bits in target_flags so TARGET_SWITCHES cannot be used.
1346 Assumption here is that rs6000_longcall_switch points into the
1347 text of the complete option, rather than being a copy, so we can
1348 scan back for the presence or absence of the no- modifier. */
1349 if (rs6000_longcall_switch
)
1351 const char *base
= rs6000_longcall_switch
;
1352 while (base
[-1] != 'm') base
--;
1354 if (*rs6000_longcall_switch
!= '\0')
1355 error ("invalid option `%s'", base
);
1356 rs6000_default_long_calls
= (base
[0] != 'n');
1359 /* Handle -m(no-)warn-altivec-long similarly. */
1360 if (rs6000_warn_altivec_long_switch
)
1362 const char *base
= rs6000_warn_altivec_long_switch
;
1363 while (base
[-1] != 'm') base
--;
1365 if (*rs6000_warn_altivec_long_switch
!= '\0')
1366 error ("invalid option `%s'", base
);
1367 rs6000_warn_altivec_long
= (base
[0] != 'n');
1370 /* Handle -mprioritize-restricted-insns option. */
1371 rs6000_sched_restricted_insns_priority
1372 = (rs6000_sched_groups
? 1 : 0);
1373 if (rs6000_sched_restricted_insns_priority_str
)
1374 rs6000_sched_restricted_insns_priority
=
1375 atoi (rs6000_sched_restricted_insns_priority_str
);
1377 /* Handle -msched-costly-dep option. */
1378 rs6000_sched_costly_dep
1379 = (rs6000_sched_groups
? store_to_load_dep_costly
: no_dep_costly
);
1380 if (rs6000_sched_costly_dep_str
)
1382 if (! strcmp (rs6000_sched_costly_dep_str
, "no"))
1383 rs6000_sched_costly_dep
= no_dep_costly
;
1384 else if (! strcmp (rs6000_sched_costly_dep_str
, "all"))
1385 rs6000_sched_costly_dep
= all_deps_costly
;
1386 else if (! strcmp (rs6000_sched_costly_dep_str
, "true_store_to_load"))
1387 rs6000_sched_costly_dep
= true_store_to_load_dep_costly
;
1388 else if (! strcmp (rs6000_sched_costly_dep_str
, "store_to_load"))
1389 rs6000_sched_costly_dep
= store_to_load_dep_costly
;
1391 rs6000_sched_costly_dep
= atoi (rs6000_sched_costly_dep_str
);
1394 /* Handle -minsert-sched-nops option. */
1395 rs6000_sched_insert_nops
1396 = (rs6000_sched_groups
? sched_finish_regroup_exact
: sched_finish_none
);
1397 if (rs6000_sched_insert_nops_str
)
1399 if (! strcmp (rs6000_sched_insert_nops_str
, "no"))
1400 rs6000_sched_insert_nops
= sched_finish_none
;
1401 else if (! strcmp (rs6000_sched_insert_nops_str
, "pad"))
1402 rs6000_sched_insert_nops
= sched_finish_pad_groups
;
1403 else if (! strcmp (rs6000_sched_insert_nops_str
, "regroup_exact"))
1404 rs6000_sched_insert_nops
= sched_finish_regroup_exact
;
1406 rs6000_sched_insert_nops
= atoi (rs6000_sched_insert_nops_str
);
1409 #ifdef TARGET_REGNAMES
1410 /* If the user desires alternate register names, copy in the
1411 alternate names now. */
1412 if (TARGET_REGNAMES
)
1413 memcpy (rs6000_reg_names
, alt_reg_names
, sizeof (rs6000_reg_names
));
1416 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
1417 If -maix-struct-return or -msvr4-struct-return was explicitly
1418 used, don't override with the ABI default. */
1419 if ((target_flags_explicit
& MASK_AIX_STRUCT_RET
) == 0)
1421 if (DEFAULT_ABI
== ABI_V4
&& !DRAFT_V4_STRUCT_RET
)
1422 target_flags
= (target_flags
& ~MASK_AIX_STRUCT_RET
);
1424 target_flags
|= MASK_AIX_STRUCT_RET
;
1427 if (TARGET_LONG_DOUBLE_128
1428 && (DEFAULT_ABI
== ABI_AIX
|| DEFAULT_ABI
== ABI_DARWIN
))
1429 REAL_MODE_FORMAT (TFmode
) = &ibm_extended_format
;
1431 /* Allocate an alias set for register saves & restores from stack. */
1432 rs6000_sr_alias_set
= new_alias_set ();
1435 ASM_GENERATE_INTERNAL_LABEL (toc_label_name
, "LCTOC", 1);
1437 /* We can only guarantee the availability of DI pseudo-ops when
1438 assembling for 64-bit targets. */
1441 targetm
.asm_out
.aligned_op
.di
= NULL
;
1442 targetm
.asm_out
.unaligned_op
.di
= NULL
;
1445 /* Set branch target alignment, if not optimizing for size. */
1448 if (rs6000_sched_groups
)
1450 if (align_functions
<= 0)
1451 align_functions
= 16;
1452 if (align_jumps
<= 0)
1454 if (align_loops
<= 0)
1457 if (align_jumps_max_skip
<= 0)
1458 align_jumps_max_skip
= 15;
1459 if (align_loops_max_skip
<= 0)
1460 align_loops_max_skip
= 15;
1463 /* Arrange to save and restore machine status around nested functions. */
1464 init_machine_status
= rs6000_init_machine_status
;
1466 /* We should always be splitting complex arguments, but we can't break
1467 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1468 if (DEFAULT_ABI
!= ABI_AIX
)
1469 targetm
.calls
.split_complex_arg
= NULL
;
1471 /* Initialize rs6000_cost with the appropriate target costs. */
1473 rs6000_cost
= TARGET_POWERPC64
? &size64_cost
: &size32_cost
;
1477 case PROCESSOR_RIOS1
:
1478 rs6000_cost
= &rios1_cost
;
1481 case PROCESSOR_RIOS2
:
1482 rs6000_cost
= &rios2_cost
;
1485 case PROCESSOR_RS64A
:
1486 rs6000_cost
= &rs64a_cost
;
1489 case PROCESSOR_MPCCORE
:
1490 rs6000_cost
= &mpccore_cost
;
1493 case PROCESSOR_PPC403
:
1494 rs6000_cost
= &ppc403_cost
;
1497 case PROCESSOR_PPC405
:
1498 rs6000_cost
= &ppc405_cost
;
1501 case PROCESSOR_PPC440
:
1502 rs6000_cost
= &ppc440_cost
;
1505 case PROCESSOR_PPC601
:
1506 rs6000_cost
= &ppc601_cost
;
1509 case PROCESSOR_PPC603
:
1510 rs6000_cost
= &ppc603_cost
;
1513 case PROCESSOR_PPC604
:
1514 rs6000_cost
= &ppc604_cost
;
1517 case PROCESSOR_PPC604e
:
1518 rs6000_cost
= &ppc604e_cost
;
1521 case PROCESSOR_PPC620
:
1522 rs6000_cost
= &ppc620_cost
;
1525 case PROCESSOR_PPC630
:
1526 rs6000_cost
= &ppc630_cost
;
1529 case PROCESSOR_PPC750
:
1530 case PROCESSOR_PPC7400
:
1531 rs6000_cost
= &ppc750_cost
;
1534 case PROCESSOR_PPC7450
:
1535 rs6000_cost
= &ppc7450_cost
;
1538 case PROCESSOR_PPC8540
:
1539 rs6000_cost
= &ppc8540_cost
;
1542 case PROCESSOR_POWER4
:
1543 case PROCESSOR_POWER5
:
1544 rs6000_cost
= &power4_cost
;
1552 /* Implement targetm.vectorize.builtin_mask_for_load. */
1554 rs6000_builtin_mask_for_load (void)
1557 return altivec_builtin_mask_for_load
;
1562 /* Implement targetm.vectorize.builtin_mask_for_store. */
1564 rs6000_builtin_mask_for_store (void)
1567 return altivec_builtin_mask_for_store
;
1572 /* Handle generic options of the form -mfoo=yes/no.
1573 NAME is the option name.
1574 VALUE is the option value.
1575 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1576 whether the option value is 'yes' or 'no' respectively. */
1578 rs6000_parse_yes_no_option (const char *name
, const char *value
, int *flag
)
1582 else if (!strcmp (value
, "yes"))
1584 else if (!strcmp (value
, "no"))
1587 error ("unknown -m%s= option specified: '%s'", name
, value
);
1590 /* Handle -mabi= options. */
1592 rs6000_parse_abi_options (void)
1594 if (rs6000_abi_string
== 0)
1596 else if (! strcmp (rs6000_abi_string
, "altivec"))
1598 rs6000_altivec_abi
= 1;
1601 else if (! strcmp (rs6000_abi_string
, "no-altivec"))
1602 rs6000_altivec_abi
= 0;
1603 else if (! strcmp (rs6000_abi_string
, "spe"))
1606 rs6000_altivec_abi
= 0;
1607 if (!TARGET_SPE_ABI
)
1608 error ("not configured for ABI: '%s'", rs6000_abi_string
);
1611 else if (! strcmp (rs6000_abi_string
, "no-spe"))
1614 error ("unknown ABI specified: '%s'", rs6000_abi_string
);
1617 /* Handle -malign-XXXXXX options. */
1619 rs6000_parse_alignment_option (void)
1621 if (rs6000_alignment_string
== 0)
1623 else if (! strcmp (rs6000_alignment_string
, "power"))
1624 rs6000_alignment_flags
= MASK_ALIGN_POWER
;
1625 else if (! strcmp (rs6000_alignment_string
, "natural"))
1626 rs6000_alignment_flags
= MASK_ALIGN_NATURAL
;
1628 error ("unknown -malign-XXXXX option specified: '%s'",
1629 rs6000_alignment_string
);
1632 /* Validate and record the size specified with the -mtls-size option. */
1635 rs6000_parse_tls_size_option (void)
1637 if (rs6000_tls_size_string
== 0)
1639 else if (strcmp (rs6000_tls_size_string
, "16") == 0)
1640 rs6000_tls_size
= 16;
1641 else if (strcmp (rs6000_tls_size_string
, "32") == 0)
1642 rs6000_tls_size
= 32;
1643 else if (strcmp (rs6000_tls_size_string
, "64") == 0)
1644 rs6000_tls_size
= 64;
1646 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string
);
1650 optimization_options (int level ATTRIBUTE_UNUSED
, int size ATTRIBUTE_UNUSED
)
1654 /* Do anything needed at the start of the asm file. */
1657 rs6000_file_start (void)
1661 const char *start
= buffer
;
1662 struct rs6000_cpu_select
*ptr
;
1663 const char *default_cpu
= TARGET_CPU_DEFAULT
;
1664 FILE *file
= asm_out_file
;
1666 default_file_start ();
1668 #ifdef TARGET_BI_ARCH
1669 if ((TARGET_DEFAULT
^ target_flags
) & MASK_64BIT
)
1673 if (flag_verbose_asm
)
1675 sprintf (buffer
, "\n%s rs6000/powerpc options:", ASM_COMMENT_START
);
1676 rs6000_select
[0].string
= default_cpu
;
1678 for (i
= 0; i
< ARRAY_SIZE (rs6000_select
); i
++)
1680 ptr
= &rs6000_select
[i
];
1681 if (ptr
->string
!= (char *)0 && ptr
->string
[0] != '\0')
1683 fprintf (file
, "%s %s%s", start
, ptr
->name
, ptr
->string
);
1688 #ifdef USING_ELFOS_H
1689 switch (rs6000_sdata
)
1691 case SDATA_NONE
: fprintf (file
, "%s -msdata=none", start
); start
= ""; break;
1692 case SDATA_DATA
: fprintf (file
, "%s -msdata=data", start
); start
= ""; break;
1693 case SDATA_SYSV
: fprintf (file
, "%s -msdata=sysv", start
); start
= ""; break;
1694 case SDATA_EABI
: fprintf (file
, "%s -msdata=eabi", start
); start
= ""; break;
1697 if (rs6000_sdata
&& g_switch_value
)
1699 fprintf (file
, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED
, start
,
1710 /* Return nonzero if this function is known to have a null epilogue. */
1713 direct_return (void)
1715 if (reload_completed
)
1717 rs6000_stack_t
*info
= rs6000_stack_info ();
1719 if (info
->first_gp_reg_save
== 32
1720 && info
->first_fp_reg_save
== 64
1721 && info
->first_altivec_reg_save
== LAST_ALTIVEC_REGNO
+ 1
1722 && ! info
->lr_save_p
1723 && ! info
->cr_save_p
1724 && info
->vrsave_mask
== 0
1732 /* Returns 1 always. */
1735 any_operand (rtx op ATTRIBUTE_UNUSED
,
1736 enum machine_mode mode ATTRIBUTE_UNUSED
)
1741 /* Returns 1 always. */
1744 any_parallel_operand (rtx op ATTRIBUTE_UNUSED
,
1745 enum machine_mode mode ATTRIBUTE_UNUSED
)
1750 /* Returns 1 if op is the count register. */
1753 count_register_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1755 if (GET_CODE (op
) != REG
)
1758 if (REGNO (op
) == COUNT_REGISTER_REGNUM
)
1761 if (REGNO (op
) > FIRST_PSEUDO_REGISTER
)
1767 /* Returns 1 if op is an altivec register. */
1770 altivec_register_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1773 return (register_operand (op
, mode
)
1774 && (GET_CODE (op
) != REG
1775 || REGNO (op
) > FIRST_PSEUDO_REGISTER
1776 || ALTIVEC_REGNO_P (REGNO (op
))));
1780 xer_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1782 if (GET_CODE (op
) != REG
)
1785 if (XER_REGNO_P (REGNO (op
)))
1791 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1792 by such constants completes more quickly. */
1795 s8bit_cint_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1797 return ( GET_CODE (op
) == CONST_INT
1798 && (INTVAL (op
) >= -128 && INTVAL (op
) <= 127));
1801 /* Return 1 if OP is a constant that can fit in a D field. */
1804 short_cint_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1806 return (GET_CODE (op
) == CONST_INT
1807 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I'));
1810 /* Similar for an unsigned D field. */
1813 u_short_cint_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1815 return (GET_CODE (op
) == CONST_INT
1816 && CONST_OK_FOR_LETTER_P (INTVAL (op
) & GET_MODE_MASK (mode
), 'K'));
1819 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1822 non_short_cint_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1824 return (GET_CODE (op
) == CONST_INT
1825 && (unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x8000) >= 0x10000);
1828 /* Returns 1 if OP is a CONST_INT that is a positive value
1829 and an exact power of 2. */
1832 exact_log2_cint_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1834 return (GET_CODE (op
) == CONST_INT
1836 && exact_log2 (INTVAL (op
)) >= 0);
1839 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1843 gpc_reg_operand (rtx op
, enum machine_mode mode
)
1845 return (register_operand (op
, mode
)
1846 && (GET_CODE (op
) != REG
1847 || (REGNO (op
) >= ARG_POINTER_REGNUM
1848 && !XER_REGNO_P (REGNO (op
)))
1849 || REGNO (op
) < MQ_REGNO
));
1852 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1856 cc_reg_operand (rtx op
, enum machine_mode mode
)
1858 return (register_operand (op
, mode
)
1859 && (GET_CODE (op
) != REG
1860 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1861 || CR_REGNO_P (REGNO (op
))));
1864 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1865 CR field that isn't CR0. */
1868 cc_reg_not_cr0_operand (rtx op
, enum machine_mode mode
)
1870 return (register_operand (op
, mode
)
1871 && (GET_CODE (op
) != REG
1872 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
1873 || CR_REGNO_NOT_CR0_P (REGNO (op
))));
1876 /* Returns 1 if OP is either a constant integer valid for a D-field or
1877 a non-special register. If a register, it must be in the proper
1878 mode unless MODE is VOIDmode. */
1881 reg_or_short_operand (rtx op
, enum machine_mode mode
)
1883 return short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1886 /* Similar, except check if the negation of the constant would be
1887 valid for a D-field. Don't allow a constant zero, since all the
1888 patterns that call this predicate use "addic r1,r2,-constant" on
1889 a constant value to set a carry when r2 is greater or equal to
1890 "constant". That doesn't work for zero. */
1893 reg_or_neg_short_operand (rtx op
, enum machine_mode mode
)
1895 if (GET_CODE (op
) == CONST_INT
)
1896 return CONST_OK_FOR_LETTER_P (INTVAL (op
), 'P') && INTVAL (op
) != 0;
1898 return gpc_reg_operand (op
, mode
);
1901 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1902 a non-special register. If a register, it must be in the proper
1903 mode unless MODE is VOIDmode. */
1906 reg_or_aligned_short_operand (rtx op
, enum machine_mode mode
)
1908 if (gpc_reg_operand (op
, mode
))
1910 else if (short_cint_operand (op
, mode
) && !(INTVAL (op
) & 3))
1917 /* Return 1 if the operand is either a register or an integer whose
1918 high-order 16 bits are zero. */
1921 reg_or_u_short_operand (rtx op
, enum machine_mode mode
)
1923 return u_short_cint_operand (op
, mode
) || gpc_reg_operand (op
, mode
);
1926 /* Return 1 is the operand is either a non-special register or ANY
1927 constant integer. */
1930 reg_or_cint_operand (rtx op
, enum machine_mode mode
)
1932 return (GET_CODE (op
) == CONST_INT
|| gpc_reg_operand (op
, mode
));
1935 /* Return 1 is the operand is either a non-special register or ANY
1936 32-bit signed constant integer. */
1939 reg_or_arith_cint_operand (rtx op
, enum machine_mode mode
)
1941 return (gpc_reg_operand (op
, mode
)
1942 || (GET_CODE (op
) == CONST_INT
1943 #if HOST_BITS_PER_WIDE_INT != 32
1944 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80000000)
1945 < (unsigned HOST_WIDE_INT
) 0x100000000ll
)
1950 /* Return 1 is the operand is either a non-special register or a 32-bit
1951 signed constant integer valid for 64-bit addition. */
1954 reg_or_add_cint64_operand (rtx op
, enum machine_mode mode
)
1956 return (gpc_reg_operand (op
, mode
)
1957 || (GET_CODE (op
) == CONST_INT
1958 #if HOST_BITS_PER_WIDE_INT == 32
1959 && INTVAL (op
) < 0x7fff8000
1961 && ((unsigned HOST_WIDE_INT
) (INTVAL (op
) + 0x80008000)
1967 /* Return 1 is the operand is either a non-special register or a 32-bit
1968 signed constant integer valid for 64-bit subtraction. */
1971 reg_or_sub_cint64_operand (rtx op
, enum machine_mode mode
)
1973 return (gpc_reg_operand (op
, mode
)
1974 || (GET_CODE (op
) == CONST_INT
1975 #if HOST_BITS_PER_WIDE_INT == 32
1976 && (- INTVAL (op
)) < 0x7fff8000
1978 && ((unsigned HOST_WIDE_INT
) ((- INTVAL (op
)) + 0x80008000)
1984 /* Return 1 is the operand is either a non-special register or ANY
1985 32-bit unsigned constant integer. */
1988 reg_or_logical_cint_operand (rtx op
, enum machine_mode mode
)
1990 if (GET_CODE (op
) == CONST_INT
)
1992 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
)
1994 if (GET_MODE_BITSIZE (mode
) <= 32)
1997 if (INTVAL (op
) < 0)
2001 return ((INTVAL (op
) & GET_MODE_MASK (mode
)
2002 & (~ (unsigned HOST_WIDE_INT
) 0xffffffff)) == 0);
2004 else if (GET_CODE (op
) == CONST_DOUBLE
)
2006 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
2010 return CONST_DOUBLE_HIGH (op
) == 0;
2013 return gpc_reg_operand (op
, mode
);
2016 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
2019 got_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2021 return (GET_CODE (op
) == SYMBOL_REF
2022 || GET_CODE (op
) == CONST
2023 || GET_CODE (op
) == LABEL_REF
);
2026 /* Return 1 if the operand is a simple references that can be loaded via
2027 the GOT (labels involving addition aren't allowed). */
2030 got_no_const_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2032 return (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == LABEL_REF
);
2035 /* Return the number of instructions it takes to form a constant in an
2036 integer register. */
2039 num_insns_constant_wide (HOST_WIDE_INT value
)
2041 /* signed constant loadable with {cal|addi} */
2042 if (CONST_OK_FOR_LETTER_P (value
, 'I'))
2045 /* constant loadable with {cau|addis} */
2046 else if (CONST_OK_FOR_LETTER_P (value
, 'L'))
2049 #if HOST_BITS_PER_WIDE_INT == 64
2050 else if (TARGET_POWERPC64
)
2052 HOST_WIDE_INT low
= ((value
& 0xffffffff) ^ 0x80000000) - 0x80000000;
2053 HOST_WIDE_INT high
= value
>> 31;
2055 if (high
== 0 || high
== -1)
2061 return num_insns_constant_wide (high
) + 1;
2063 return (num_insns_constant_wide (high
)
2064 + num_insns_constant_wide (low
) + 1);
2073 num_insns_constant (rtx op
, enum machine_mode mode
)
2075 if (GET_CODE (op
) == CONST_INT
)
2077 #if HOST_BITS_PER_WIDE_INT == 64
2078 if ((INTVAL (op
) >> 31) != 0 && (INTVAL (op
) >> 31) != -1
2079 && mask64_operand (op
, mode
))
2083 return num_insns_constant_wide (INTVAL (op
));
2086 else if (GET_CODE (op
) == CONST_DOUBLE
&& mode
== SFmode
)
2091 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
2092 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
2093 return num_insns_constant_wide ((HOST_WIDE_INT
) l
);
2096 else if (GET_CODE (op
) == CONST_DOUBLE
)
2102 int endian
= (WORDS_BIG_ENDIAN
== 0);
2104 if (mode
== VOIDmode
|| mode
== DImode
)
2106 high
= CONST_DOUBLE_HIGH (op
);
2107 low
= CONST_DOUBLE_LOW (op
);
2111 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
2112 REAL_VALUE_TO_TARGET_DOUBLE (rv
, l
);
2114 low
= l
[1 - endian
];
2118 return (num_insns_constant_wide (low
)
2119 + num_insns_constant_wide (high
));
2123 if (high
== 0 && low
>= 0)
2124 return num_insns_constant_wide (low
);
2126 else if (high
== -1 && low
< 0)
2127 return num_insns_constant_wide (low
);
2129 else if (mask64_operand (op
, mode
))
2133 return num_insns_constant_wide (high
) + 1;
2136 return (num_insns_constant_wide (high
)
2137 + num_insns_constant_wide (low
) + 1);
2145 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
2146 register with one instruction per word. We only do this if we can
2147 safely read CONST_DOUBLE_{LOW,HIGH}. */
2150 easy_fp_constant (rtx op
, enum machine_mode mode
)
2152 if (GET_CODE (op
) != CONST_DOUBLE
2153 || GET_MODE (op
) != mode
2154 || (GET_MODE_CLASS (mode
) != MODE_FLOAT
&& mode
!= DImode
))
2157 /* Consider all constants with -msoft-float to be easy. */
2158 if ((TARGET_SOFT_FLOAT
|| !TARGET_FPRS
)
2162 /* If we are using V.4 style PIC, consider all constants to be hard. */
2163 if (flag_pic
&& DEFAULT_ABI
== ABI_V4
)
2166 #ifdef TARGET_RELOCATABLE
2167 /* Similarly if we are using -mrelocatable, consider all constants
2169 if (TARGET_RELOCATABLE
)
2178 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
2179 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
2181 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
2182 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1
2183 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[2]) == 1
2184 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[3]) == 1);
2187 else if (mode
== DFmode
)
2192 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
2193 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
2195 return (num_insns_constant_wide ((HOST_WIDE_INT
) k
[0]) == 1
2196 && num_insns_constant_wide ((HOST_WIDE_INT
) k
[1]) == 1);
2199 else if (mode
== SFmode
)
2204 REAL_VALUE_FROM_CONST_DOUBLE (rv
, op
);
2205 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
2207 return num_insns_constant_wide (l
) == 1;
2210 else if (mode
== DImode
)
2211 return ((TARGET_POWERPC64
2212 && GET_CODE (op
) == CONST_DOUBLE
&& CONST_DOUBLE_LOW (op
) == 0)
2213 || (num_insns_constant (op
, DImode
) <= 2));
2215 else if (mode
== SImode
)
2221 /* Returns the constant for the splat instruction, if exists. */
2224 easy_vector_splat_const (int cst
, enum machine_mode mode
)
2229 if (EASY_VECTOR_15 (cst
)
2230 || EASY_VECTOR_15_ADD_SELF (cst
))
2232 if ((cst
& 0xffff) != ((cst
>> 16) & 0xffff))
2236 if (EASY_VECTOR_15 (cst
)
2237 || EASY_VECTOR_15_ADD_SELF (cst
))
2239 if ((cst
& 0xff) != ((cst
>> 8) & 0xff))
2243 if (EASY_VECTOR_15 (cst
)
2244 || EASY_VECTOR_15_ADD_SELF (cst
))
2253 /* Return nonzero if all elements of a vector have the same value. */
2256 easy_vector_same (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2260 units
= CONST_VECTOR_NUNITS (op
);
2262 cst
= INTVAL (CONST_VECTOR_ELT (op
, 0));
2263 for (i
= 1; i
< units
; ++i
)
2264 if (INTVAL (CONST_VECTOR_ELT (op
, i
)) != cst
)
2266 if (i
== units
&& easy_vector_splat_const (cst
, mode
))
2271 /* Return 1 if the operand is a CONST_INT and can be put into a
2272 register without using memory. */
2275 easy_vector_constant (rtx op
, enum machine_mode mode
)
2279 if (GET_CODE (op
) != CONST_VECTOR
2284 if (zero_constant (op
, mode
)
2285 && ((TARGET_ALTIVEC
&& ALTIVEC_VECTOR_MODE (mode
))
2286 || (TARGET_SPE
&& SPE_VECTOR_MODE (mode
))))
2289 if (GET_MODE_CLASS (mode
) != MODE_VECTOR_INT
)
2292 if (TARGET_SPE
&& mode
== V1DImode
)
2295 cst
= INTVAL (CONST_VECTOR_ELT (op
, 0));
2296 cst2
= INTVAL (CONST_VECTOR_ELT (op
, 1));
2298 /* Limit SPE vectors to 15 bits signed. These we can generate with:
2300 evmergelo r0, r0, r0
2303 I don't know how efficient it would be to allow bigger constants,
2304 considering we'll have an extra 'ori' for every 'li'. I doubt 5
2305 instructions is better than a 64-bit memory load, but I don't
2306 have the e500 timing specs. */
2307 if (TARGET_SPE
&& mode
== V2SImode
2308 && cst
>= -0x7fff && cst
<= 0x7fff
2309 && cst2
>= -0x7fff && cst2
<= 0x7fff)
2313 && easy_vector_same (op
, mode
))
2315 cst
= easy_vector_splat_const (cst
, mode
);
2316 if (EASY_VECTOR_15_ADD_SELF (cst
)
2317 || EASY_VECTOR_15 (cst
))
2323 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
2326 easy_vector_constant_add_self (rtx op
, enum machine_mode mode
)
2330 && GET_CODE (op
) == CONST_VECTOR
2331 && easy_vector_same (op
, mode
))
2333 cst
= easy_vector_splat_const (INTVAL (CONST_VECTOR_ELT (op
, 0)), mode
);
2334 if (EASY_VECTOR_15_ADD_SELF (cst
))
2340 /* Generate easy_vector_constant out of a easy_vector_constant_add_self. */
2343 gen_easy_vector_constant_add_self (rtx op
)
2347 units
= GET_MODE_NUNITS (GET_MODE (op
));
2348 v
= rtvec_alloc (units
);
2350 for (i
= 0; i
< units
; i
++)
2352 GEN_INT (INTVAL (CONST_VECTOR_ELT (op
, i
)) >> 1);
2353 return gen_rtx_raw_CONST_VECTOR (GET_MODE (op
), v
);
2357 output_vec_const_move (rtx
*operands
)
2360 enum machine_mode mode
;
2366 cst
= INTVAL (CONST_VECTOR_ELT (vec
, 0));
2367 cst2
= INTVAL (CONST_VECTOR_ELT (vec
, 1));
2368 mode
= GET_MODE (dest
);
2372 if (zero_constant (vec
, mode
))
2373 return "vxor %0,%0,%0";
2374 else if (easy_vector_constant (vec
, mode
))
2376 operands
[1] = GEN_INT (cst
);
2380 if (EASY_VECTOR_15 (cst
))
2382 operands
[1] = GEN_INT (cst
);
2383 return "vspltisw %0,%1";
2385 else if (EASY_VECTOR_15_ADD_SELF (cst
))
2389 if (EASY_VECTOR_15 (cst
))
2391 operands
[1] = GEN_INT (cst
);
2392 return "vspltish %0,%1";
2394 else if (EASY_VECTOR_15_ADD_SELF (cst
))
2398 if (EASY_VECTOR_15 (cst
))
2400 operands
[1] = GEN_INT (cst
);
2401 return "vspltisb %0,%1";
2403 else if (EASY_VECTOR_15_ADD_SELF (cst
))
2415 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2416 pattern of V1DI, V4HI, and V2SF.
2418 FIXME: We should probably return # and add post reload
2419 splitters for these, but this way is so easy ;-).
2421 operands
[1] = GEN_INT (cst
);
2422 operands
[2] = GEN_INT (cst2
);
2424 return "li %0,%1\n\tevmergelo %0,%0,%0";
2426 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
2432 /* Return 1 if the operand is the constant 0. This works for scalars
2433 as well as vectors. */
2435 zero_constant (rtx op
, enum machine_mode mode
)
2437 return op
== CONST0_RTX (mode
);
2440 /* Return 1 if the operand is 0.0. */
2442 zero_fp_constant (rtx op
, enum machine_mode mode
)
2444 return GET_MODE_CLASS (mode
) == MODE_FLOAT
&& op
== CONST0_RTX (mode
);
2447 /* Return 1 if the operand is in volatile memory. Note that during
2448 the RTL generation phase, memory_operand does not return TRUE for
2449 volatile memory references. So this function allows us to
2450 recognize volatile references where its safe. */
2453 volatile_mem_operand (rtx op
, enum machine_mode mode
)
2455 if (GET_CODE (op
) != MEM
)
2458 if (!MEM_VOLATILE_P (op
))
2461 if (mode
!= GET_MODE (op
))
2464 if (reload_completed
)
2465 return memory_operand (op
, mode
);
2467 if (reload_in_progress
)
2468 return strict_memory_address_p (mode
, XEXP (op
, 0));
2470 return memory_address_p (mode
, XEXP (op
, 0));
2473 /* Return 1 if the operand is an offsettable memory operand. */
2476 offsettable_mem_operand (rtx op
, enum machine_mode mode
)
2478 return ((GET_CODE (op
) == MEM
)
2479 && offsettable_address_p (reload_completed
|| reload_in_progress
,
2480 mode
, XEXP (op
, 0)));
2483 /* Return 1 if the operand is either an easy FP constant (see above) or
2487 mem_or_easy_const_operand (rtx op
, enum machine_mode mode
)
2489 return memory_operand (op
, mode
) || easy_fp_constant (op
, mode
);
2492 /* Return 1 if the operand is either a non-special register or an item
2493 that can be used as the operand of a `mode' add insn. */
2496 add_operand (rtx op
, enum machine_mode mode
)
2498 if (GET_CODE (op
) == CONST_INT
)
2499 return (CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
2500 || CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
2502 return gpc_reg_operand (op
, mode
);
2505 /* Return 1 if OP is a constant but not a valid add_operand. */
2508 non_add_cint_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2510 return (GET_CODE (op
) == CONST_INT
2511 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I')
2512 && !CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'));
2515 /* Return 1 if the operand is a non-special register or a constant that
2516 can be used as the operand of an OR or XOR insn on the RS/6000. */
2519 logical_operand (rtx op
, enum machine_mode mode
)
2521 HOST_WIDE_INT opl
, oph
;
2523 if (gpc_reg_operand (op
, mode
))
2526 if (GET_CODE (op
) == CONST_INT
)
2528 opl
= INTVAL (op
) & GET_MODE_MASK (mode
);
2530 #if HOST_BITS_PER_WIDE_INT <= 32
2531 if (GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
&& opl
< 0)
2535 else if (GET_CODE (op
) == CONST_DOUBLE
)
2537 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
2540 opl
= CONST_DOUBLE_LOW (op
);
2541 oph
= CONST_DOUBLE_HIGH (op
);
2548 return ((opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff) == 0
2549 || (opl
& ~ (unsigned HOST_WIDE_INT
) 0xffff0000) == 0);
2552 /* Return 1 if C is a constant that is not a logical operand (as
2553 above), but could be split into one. */
2556 non_logical_cint_operand (rtx op
, enum machine_mode mode
)
2558 return ((GET_CODE (op
) == CONST_INT
|| GET_CODE (op
) == CONST_DOUBLE
)
2559 && ! logical_operand (op
, mode
)
2560 && reg_or_logical_cint_operand (op
, mode
));
2563 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
2564 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
2565 Reject all ones and all zeros, since these should have been optimized
2566 away and confuse the making of MB and ME. */
2569 mask_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2571 HOST_WIDE_INT c
, lsb
;
2573 if (GET_CODE (op
) != CONST_INT
)
2578 /* Fail in 64-bit mode if the mask wraps around because the upper
2579 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
2580 if (TARGET_POWERPC64
&& (c
& 0x80000001) == 0x80000001)
2583 /* We don't change the number of transitions by inverting,
2584 so make sure we start with the LS bit zero. */
2588 /* Reject all zeros or all ones. */
2592 /* Find the first transition. */
2595 /* Invert to look for a second transition. */
2598 /* Erase first transition. */
2601 /* Find the second transition (if any). */
2604 /* Match if all the bits above are 1's (or c is zero). */
2608 /* Return 1 for the PowerPC64 rlwinm corner case. */
2611 mask_operand_wrap (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2613 HOST_WIDE_INT c
, lsb
;
2615 if (GET_CODE (op
) != CONST_INT
)
2620 if ((c
& 0x80000001) != 0x80000001)
2634 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
2635 It is if there are no more than one 1->0 or 0->1 transitions.
2636 Reject all zeros, since zero should have been optimized away and
2637 confuses the making of MB and ME. */
2640 mask64_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2642 if (GET_CODE (op
) == CONST_INT
)
2644 HOST_WIDE_INT c
, lsb
;
2648 /* Reject all zeros. */
2652 /* We don't change the number of transitions by inverting,
2653 so make sure we start with the LS bit zero. */
2657 /* Find the transition, and check that all bits above are 1's. */
2660 /* Match if all the bits above are 1's (or c is zero). */
2666 /* Like mask64_operand, but allow up to three transitions. This
2667 predicate is used by insn patterns that generate two rldicl or
2668 rldicr machine insns. */
2671 mask64_2_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
2673 if (GET_CODE (op
) == CONST_INT
)
2675 HOST_WIDE_INT c
, lsb
;
2679 /* Disallow all zeros. */
2683 /* We don't change the number of transitions by inverting,
2684 so make sure we start with the LS bit zero. */
2688 /* Find the first transition. */
2691 /* Invert to look for a second transition. */
2694 /* Erase first transition. */
2697 /* Find the second transition. */
2700 /* Invert to look for a third transition. */
2703 /* Erase second transition. */
2706 /* Find the third transition (if any). */
2709 /* Match if all the bits above are 1's (or c is zero). */
2715 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2716 implement ANDing by the mask IN. */
2718 build_mask64_2_operands (rtx in
, rtx
*out
)
2720 #if HOST_BITS_PER_WIDE_INT >= 64
2721 unsigned HOST_WIDE_INT c
, lsb
, m1
, m2
;
2724 if (GET_CODE (in
) != CONST_INT
)
2730 /* Assume c initially something like 0x00fff000000fffff. The idea
2731 is to rotate the word so that the middle ^^^^^^ group of zeros
2732 is at the MS end and can be cleared with an rldicl mask. We then
2733 rotate back and clear off the MS ^^ group of zeros with a
2735 c
= ~c
; /* c == 0xff000ffffff00000 */
2736 lsb
= c
& -c
; /* lsb == 0x0000000000100000 */
2737 m1
= -lsb
; /* m1 == 0xfffffffffff00000 */
2738 c
= ~c
; /* c == 0x00fff000000fffff */
2739 c
&= -lsb
; /* c == 0x00fff00000000000 */
2740 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
2741 c
= ~c
; /* c == 0xff000fffffffffff */
2742 c
&= -lsb
; /* c == 0xff00000000000000 */
2744 while ((lsb
>>= 1) != 0)
2745 shift
++; /* shift == 44 on exit from loop */
2746 m1
<<= 64 - shift
; /* m1 == 0xffffff0000000000 */
2747 m1
= ~m1
; /* m1 == 0x000000ffffffffff */
2748 m2
= ~c
; /* m2 == 0x00ffffffffffffff */
2752 /* Assume c initially something like 0xff000f0000000000. The idea
2753 is to rotate the word so that the ^^^ middle group of zeros
2754 is at the LS end and can be cleared with an rldicr mask. We then
2755 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2757 lsb
= c
& -c
; /* lsb == 0x0000010000000000 */
2758 m2
= -lsb
; /* m2 == 0xffffff0000000000 */
2759 c
= ~c
; /* c == 0x00fff0ffffffffff */
2760 c
&= -lsb
; /* c == 0x00fff00000000000 */
2761 lsb
= c
& -c
; /* lsb == 0x0000100000000000 */
2762 c
= ~c
; /* c == 0xff000fffffffffff */
2763 c
&= -lsb
; /* c == 0xff00000000000000 */
2765 while ((lsb
>>= 1) != 0)
2766 shift
++; /* shift == 44 on exit from loop */
2767 m1
= ~c
; /* m1 == 0x00ffffffffffffff */
2768 m1
>>= shift
; /* m1 == 0x0000000000000fff */
2769 m1
= ~m1
; /* m1 == 0xfffffffffffff000 */
2772 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2773 masks will be all 1's. We are guaranteed more than one transition. */
2774 out
[0] = GEN_INT (64 - shift
);
2775 out
[1] = GEN_INT (m1
);
2776 out
[2] = GEN_INT (shift
);
2777 out
[3] = GEN_INT (m2
);
2785 /* Return 1 if the operand is either a non-special register or a constant
2786 that can be used as the operand of a PowerPC64 logical AND insn. */
2789 and64_operand (rtx op
, enum machine_mode mode
)
2791 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
2792 return (gpc_reg_operand (op
, mode
) || mask64_operand (op
, mode
));
2794 return (logical_operand (op
, mode
) || mask64_operand (op
, mode
));
2797 /* Like the above, but also match constants that can be implemented
2798 with two rldicl or rldicr insns. */
2801 and64_2_operand (rtx op
, enum machine_mode mode
)
2803 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
2804 return gpc_reg_operand (op
, mode
) || mask64_2_operand (op
, mode
);
2806 return logical_operand (op
, mode
) || mask64_2_operand (op
, mode
);
2809 /* Return 1 if the operand is either a non-special register or a
2810 constant that can be used as the operand of an RS/6000 logical AND insn. */
2813 and_operand (rtx op
, enum machine_mode mode
)
2815 if (fixed_regs
[CR0_REGNO
]) /* CR0 not available, don't do andi./andis. */
2816 return (gpc_reg_operand (op
, mode
) || mask_operand (op
, mode
));
2818 return (logical_operand (op
, mode
) || mask_operand (op
, mode
));
2821 /* Return 1 if the operand is a general register or memory operand. */
2824 reg_or_mem_operand (rtx op
, enum machine_mode mode
)
2826 return (gpc_reg_operand (op
, mode
)
2827 || memory_operand (op
, mode
)
2828 || macho_lo_sum_memory_operand (op
, mode
)
2829 || volatile_mem_operand (op
, mode
));
2832 /* Return 1 if the operand is a general register or memory operand without
2833 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2837 lwa_operand (rtx op
, enum machine_mode mode
)
2841 if (reload_completed
&& GET_CODE (inner
) == SUBREG
)
2842 inner
= SUBREG_REG (inner
);
2844 return gpc_reg_operand (inner
, mode
)
2845 || (memory_operand (inner
, mode
)
2846 && GET_CODE (XEXP (inner
, 0)) != PRE_INC
2847 && GET_CODE (XEXP (inner
, 0)) != PRE_DEC
2848 && (GET_CODE (XEXP (inner
, 0)) != PLUS
2849 || GET_CODE (XEXP (XEXP (inner
, 0), 1)) != CONST_INT
2850 || INTVAL (XEXP (XEXP (inner
, 0), 1)) % 4 == 0));
2853 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2856 symbol_ref_operand (rtx op
, enum machine_mode mode
)
2858 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
2861 return (GET_CODE (op
) == SYMBOL_REF
2862 && (DEFAULT_ABI
!= ABI_AIX
|| SYMBOL_REF_FUNCTION_P (op
)));
2865 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2866 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2869 call_operand (rtx op
, enum machine_mode mode
)
2871 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
2874 return (GET_CODE (op
) == SYMBOL_REF
2875 || (GET_CODE (op
) == REG
2876 && (REGNO (op
) == LINK_REGISTER_REGNUM
2877 || REGNO (op
) == COUNT_REGISTER_REGNUM
2878 || REGNO (op
) >= FIRST_PSEUDO_REGISTER
)));
2881 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2885 current_file_function_operand (rtx op
,
2886 enum machine_mode mode ATTRIBUTE_UNUSED
)
2888 return (GET_CODE (op
) == SYMBOL_REF
2889 && (DEFAULT_ABI
!= ABI_AIX
|| SYMBOL_REF_FUNCTION_P (op
))
2890 && (SYMBOL_REF_LOCAL_P (op
)
2891 || (op
== XEXP (DECL_RTL (current_function_decl
), 0))));
2894 /* Return 1 if this operand is a valid input for a move insn. */
2897 input_operand (rtx op
, enum machine_mode mode
)
2899 /* Memory is always valid. */
2900 if (memory_operand (op
, mode
))
2903 /* For floating-point, easy constants are valid. */
2904 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
2906 && easy_fp_constant (op
, mode
))
2909 /* Allow any integer constant. */
2910 if (GET_MODE_CLASS (mode
) == MODE_INT
2911 && (GET_CODE (op
) == CONST_INT
2912 || GET_CODE (op
) == CONST_DOUBLE
))
2915 /* Allow easy vector constants. */
2916 if (GET_CODE (op
) == CONST_VECTOR
2917 && easy_vector_constant (op
, mode
))
2920 /* For floating-point or multi-word mode, the only remaining valid type
2922 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
2923 || GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
2924 return register_operand (op
, mode
);
2926 /* The only cases left are integral modes one word or smaller (we
2927 do not get called for MODE_CC values). These can be in any
2929 if (register_operand (op
, mode
))
2932 /* A SYMBOL_REF referring to the TOC is valid. */
2933 if (legitimate_constant_pool_address_p (op
))
2936 /* A constant pool expression (relative to the TOC) is valid */
2937 if (toc_relative_expr_p (op
))
2940 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2942 if (DEFAULT_ABI
== ABI_V4
2943 && (GET_CODE (op
) == SYMBOL_REF
|| GET_CODE (op
) == CONST
)
2944 && small_data_operand (op
, Pmode
))
2951 /* Darwin, AIX increases natural record alignment to doubleword if the first
2952 field is an FP double while the FP fields remain word aligned. */
2955 rs6000_special_round_type_align (tree type
, int computed
, int specified
)
2957 tree field
= TYPE_FIELDS (type
);
2959 /* Skip all the static variables only if ABI is greater than
2961 while (field
!= NULL
&& TREE_CODE (field
) == VAR_DECL
)
2962 field
= TREE_CHAIN (field
);
2964 if (field
== NULL
|| field
== type
|| DECL_MODE (field
) != DFmode
)
2965 return MAX (computed
, specified
);
2967 return MAX (MAX (computed
, specified
), 64);
2970 /* Return 1 for an operand in small memory on V.4/eabi. */
2973 small_data_operand (rtx op ATTRIBUTE_UNUSED
,
2974 enum machine_mode mode ATTRIBUTE_UNUSED
)
2979 if (rs6000_sdata
== SDATA_NONE
|| rs6000_sdata
== SDATA_DATA
)
2982 if (DEFAULT_ABI
!= ABI_V4
)
2985 if (GET_CODE (op
) == SYMBOL_REF
)
2988 else if (GET_CODE (op
) != CONST
2989 || GET_CODE (XEXP (op
, 0)) != PLUS
2990 || GET_CODE (XEXP (XEXP (op
, 0), 0)) != SYMBOL_REF
2991 || GET_CODE (XEXP (XEXP (op
, 0), 1)) != CONST_INT
)
2996 rtx sum
= XEXP (op
, 0);
2997 HOST_WIDE_INT summand
;
2999 /* We have to be careful here, because it is the referenced address
3000 that must be 32k from _SDA_BASE_, not just the symbol. */
3001 summand
= INTVAL (XEXP (sum
, 1));
3002 if (summand
< 0 || (unsigned HOST_WIDE_INT
) summand
> g_switch_value
)
3005 sym_ref
= XEXP (sum
, 0);
3008 return SYMBOL_REF_SMALL_P (sym_ref
);
3014 /* Return true, if operand is a memory operand and has a
3015 displacement divisible by 4. */
3018 word_offset_memref_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
3023 if (!memory_operand (op
, mode
))
3026 addr
= XEXP (op
, 0);
3027 if (GET_CODE (addr
) == PLUS
3028 && GET_CODE (XEXP (addr
, 0)) == REG
3029 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
3030 off
= INTVAL (XEXP (addr
, 1));
3032 return (off
% 4) == 0;
3035 /* Return true if either operand is a general purpose register. */
3038 gpr_or_gpr_p (rtx op0
, rtx op1
)
3040 return ((REG_P (op0
) && INT_REGNO_P (REGNO (op0
)))
3041 || (REG_P (op1
) && INT_REGNO_P (REGNO (op1
))));
3045 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
3048 constant_pool_expr_1 (rtx op
, int *have_sym
, int *have_toc
)
3050 switch (GET_CODE(op
))
3053 if (RS6000_SYMBOL_REF_TLS_P (op
))
3055 else if (CONSTANT_POOL_ADDRESS_P (op
))
3057 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op
), Pmode
))
3065 else if (! strcmp (XSTR (op
, 0), toc_label_name
))
3074 return (constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
)
3075 && constant_pool_expr_1 (XEXP (op
, 1), have_sym
, have_toc
));
3077 return constant_pool_expr_1 (XEXP (op
, 0), have_sym
, have_toc
);
3086 constant_pool_expr_p (rtx op
)
3090 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_sym
;
3094 toc_relative_expr_p (rtx op
)
3098 return constant_pool_expr_1 (op
, &have_sym
, &have_toc
) && have_toc
;
3102 legitimate_constant_pool_address_p (rtx x
)
3105 && GET_CODE (x
) == PLUS
3106 && GET_CODE (XEXP (x
, 0)) == REG
3107 && (TARGET_MINIMAL_TOC
|| REGNO (XEXP (x
, 0)) == TOC_REGISTER
)
3108 && constant_pool_expr_p (XEXP (x
, 1)));
3112 legitimate_small_data_p (enum machine_mode mode
, rtx x
)
3114 return (DEFAULT_ABI
== ABI_V4
3115 && !flag_pic
&& !TARGET_TOC
3116 && (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
)
3117 && small_data_operand (x
, mode
));
3120 /* SPE offset addressing is limited to 5-bits worth of double words. */
3121 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
3124 rs6000_legitimate_offset_address_p (enum machine_mode mode
, rtx x
, int strict
)
3126 unsigned HOST_WIDE_INT offset
, extra
;
3128 if (GET_CODE (x
) != PLUS
)
3130 if (GET_CODE (XEXP (x
, 0)) != REG
)
3132 if (!INT_REG_OK_FOR_BASE_P (XEXP (x
, 0), strict
))
3134 if (legitimate_constant_pool_address_p (x
))
3136 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
3139 offset
= INTVAL (XEXP (x
, 1));
3147 /* AltiVec vector modes. Only reg+reg addressing is valid here,
3148 which leaves the only valid constant offset of zero, which by
3149 canonicalization rules is also invalid. */
3156 /* SPE vector modes. */
3157 return SPE_CONST_OFFSET_OK (offset
);
3161 if (mode
== DFmode
|| !TARGET_POWERPC64
)
3163 else if (offset
& 3)
3169 if (mode
== TFmode
|| !TARGET_POWERPC64
)
3171 else if (offset
& 3)
3182 return (offset
< 0x10000) && (offset
+ extra
< 0x10000);
3186 legitimate_indexed_address_p (rtx x
, int strict
)
3190 if (GET_CODE (x
) != PLUS
)
3196 if (!REG_P (op0
) || !REG_P (op1
))
3199 return ((INT_REG_OK_FOR_BASE_P (op0
, strict
)
3200 && INT_REG_OK_FOR_INDEX_P (op1
, strict
))
3201 || (INT_REG_OK_FOR_BASE_P (op1
, strict
)
3202 && INT_REG_OK_FOR_INDEX_P (op0
, strict
)));
3206 legitimate_indirect_address_p (rtx x
, int strict
)
3208 return GET_CODE (x
) == REG
&& INT_REG_OK_FOR_BASE_P (x
, strict
);
3212 macho_lo_sum_memory_operand (rtx x
, enum machine_mode mode
)
3214 if (!TARGET_MACHO
|| !flag_pic
3215 || mode
!= SImode
|| GET_CODE(x
) != MEM
)
3219 if (GET_CODE (x
) != LO_SUM
)
3221 if (GET_CODE (XEXP (x
, 0)) != REG
)
3223 if (!INT_REG_OK_FOR_BASE_P (XEXP (x
, 0), 0))
3227 return CONSTANT_P (x
);
3231 legitimate_lo_sum_address_p (enum machine_mode mode
, rtx x
, int strict
)
3233 if (GET_CODE (x
) != LO_SUM
)
3235 if (GET_CODE (XEXP (x
, 0)) != REG
)
3237 if (!INT_REG_OK_FOR_BASE_P (XEXP (x
, 0), strict
))
3241 if (TARGET_ELF
|| TARGET_MACHO
)
3243 if (DEFAULT_ABI
!= ABI_AIX
&& DEFAULT_ABI
!= ABI_DARWIN
&& flag_pic
)
3247 if (GET_MODE_NUNITS (mode
) != 1)
3249 if (GET_MODE_BITSIZE (mode
) > 64)
3252 return CONSTANT_P (x
);
3259 /* Try machine-dependent ways of modifying an illegitimate address
3260 to be legitimate. If we find one, return the new, valid address.
3261 This is used from only one place: `memory_address' in explow.c.
3263 OLDX is the address as it was before break_out_memory_refs was
3264 called. In some cases it is useful to look at this to decide what
3267 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
3269 It is always safe for this function to do nothing. It exists to
3270 recognize opportunities to optimize the output.
3272 On RS/6000, first check for the sum of a register with a constant
3273 integer that is out of range. If so, generate code to add the
3274 constant with the low-order 16 bits masked to the register and force
3275 this result into another register (this can be done with `cau').
3276 Then generate an address of REG+(CONST&0xffff), allowing for the
3277 possibility of bit 16 being a one.
3279 Then check for the sum of a register and something not constant, try to
3280 load the other things into a register and return the sum. */
3283 rs6000_legitimize_address (rtx x
, rtx oldx ATTRIBUTE_UNUSED
,
3284 enum machine_mode mode
)
3286 if (GET_CODE (x
) == SYMBOL_REF
)
3288 enum tls_model model
= SYMBOL_REF_TLS_MODEL (x
);
3290 return rs6000_legitimize_tls_address (x
, model
);
3293 if (GET_CODE (x
) == PLUS
3294 && GET_CODE (XEXP (x
, 0)) == REG
3295 && GET_CODE (XEXP (x
, 1)) == CONST_INT
3296 && (unsigned HOST_WIDE_INT
) (INTVAL (XEXP (x
, 1)) + 0x8000) >= 0x10000)
3298 HOST_WIDE_INT high_int
, low_int
;
3300 low_int
= ((INTVAL (XEXP (x
, 1)) & 0xffff) ^ 0x8000) - 0x8000;
3301 high_int
= INTVAL (XEXP (x
, 1)) - low_int
;
3302 sum
= force_operand (gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
3303 GEN_INT (high_int
)), 0);
3304 return gen_rtx_PLUS (Pmode
, sum
, GEN_INT (low_int
));
3306 else if (GET_CODE (x
) == PLUS
3307 && GET_CODE (XEXP (x
, 0)) == REG
3308 && GET_CODE (XEXP (x
, 1)) != CONST_INT
3309 && GET_MODE_NUNITS (mode
) == 1
3310 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3312 || (mode
!= DFmode
&& mode
!= TFmode
))
3313 && (TARGET_POWERPC64
|| mode
!= DImode
)
3316 return gen_rtx_PLUS (Pmode
, XEXP (x
, 0),
3317 force_reg (Pmode
, force_operand (XEXP (x
, 1), 0)));
3319 else if (ALTIVEC_VECTOR_MODE (mode
))
3323 /* Make sure both operands are registers. */
3324 if (GET_CODE (x
) == PLUS
)
3325 return gen_rtx_PLUS (Pmode
, force_reg (Pmode
, XEXP (x
, 0)),
3326 force_reg (Pmode
, XEXP (x
, 1)));
3328 reg
= force_reg (Pmode
, x
);
3331 else if (SPE_VECTOR_MODE (mode
))
3333 /* We accept [reg + reg] and [reg + OFFSET]. */
3335 if (GET_CODE (x
) == PLUS
)
3337 rtx op1
= XEXP (x
, 0);
3338 rtx op2
= XEXP (x
, 1);
3340 op1
= force_reg (Pmode
, op1
);
3342 if (GET_CODE (op2
) != REG
3343 && (GET_CODE (op2
) != CONST_INT
3344 || !SPE_CONST_OFFSET_OK (INTVAL (op2
))))
3345 op2
= force_reg (Pmode
, op2
);
3347 return gen_rtx_PLUS (Pmode
, op1
, op2
);
3350 return force_reg (Pmode
, x
);
3356 && GET_CODE (x
) != CONST_INT
3357 && GET_CODE (x
) != CONST_DOUBLE
3359 && GET_MODE_NUNITS (mode
) == 1
3360 && (GET_MODE_BITSIZE (mode
) <= 32
3361 || ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) && mode
== DFmode
)))
3363 rtx reg
= gen_reg_rtx (Pmode
);
3364 emit_insn (gen_elf_high (reg
, x
));
3365 return gen_rtx_LO_SUM (Pmode
, reg
, x
);
3367 else if (TARGET_MACHO
&& TARGET_32BIT
&& TARGET_NO_TOC
3370 && ! MACHO_DYNAMIC_NO_PIC_P
3372 && GET_CODE (x
) != CONST_INT
3373 && GET_CODE (x
) != CONST_DOUBLE
3375 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
) || mode
!= DFmode
)
3379 rtx reg
= gen_reg_rtx (Pmode
);
3380 emit_insn (gen_macho_high (reg
, x
));
3381 return gen_rtx_LO_SUM (Pmode
, reg
, x
);
3384 && constant_pool_expr_p (x
)
3385 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), Pmode
))
3387 return create_TOC_reference (x
);
3393 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3394 We need to emit DTP-relative relocations. */
3397 rs6000_output_dwarf_dtprel (FILE *file
, int size
, rtx x
)
3402 fputs ("\t.long\t", file
);
3405 fputs (DOUBLE_INT_ASM_OP
, file
);
3410 output_addr_const (file
, x
);
3411 fputs ("@dtprel+0x8000", file
);
3414 /* Construct the SYMBOL_REF for the tls_get_addr function. */
3416 static GTY(()) rtx rs6000_tls_symbol
;
3418 rs6000_tls_get_addr (void)
3420 if (!rs6000_tls_symbol
)
3421 rs6000_tls_symbol
= init_one_libfunc ("__tls_get_addr");
3423 return rs6000_tls_symbol
;
3426 /* Construct the SYMBOL_REF for TLS GOT references. */
3428 static GTY(()) rtx rs6000_got_symbol
;
3430 rs6000_got_sym (void)
3432 if (!rs6000_got_symbol
)
3434 rs6000_got_symbol
= gen_rtx_SYMBOL_REF (Pmode
, "_GLOBAL_OFFSET_TABLE_");
3435 SYMBOL_REF_FLAGS (rs6000_got_symbol
) |= SYMBOL_FLAG_LOCAL
;
3436 SYMBOL_REF_FLAGS (rs6000_got_symbol
) |= SYMBOL_FLAG_EXTERNAL
;
3439 return rs6000_got_symbol
;
3442 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3443 this (thread-local) address. */
3446 rs6000_legitimize_tls_address (rtx addr
, enum tls_model model
)
3450 dest
= gen_reg_rtx (Pmode
);
3451 if (model
== TLS_MODEL_LOCAL_EXEC
&& rs6000_tls_size
== 16)
3457 tlsreg
= gen_rtx_REG (Pmode
, 13);
3458 insn
= gen_tls_tprel_64 (dest
, tlsreg
, addr
);
3462 tlsreg
= gen_rtx_REG (Pmode
, 2);
3463 insn
= gen_tls_tprel_32 (dest
, tlsreg
, addr
);
3467 else if (model
== TLS_MODEL_LOCAL_EXEC
&& rs6000_tls_size
== 32)
3471 tmp
= gen_reg_rtx (Pmode
);
3474 tlsreg
= gen_rtx_REG (Pmode
, 13);
3475 insn
= gen_tls_tprel_ha_64 (tmp
, tlsreg
, addr
);
3479 tlsreg
= gen_rtx_REG (Pmode
, 2);
3480 insn
= gen_tls_tprel_ha_32 (tmp
, tlsreg
, addr
);
3484 insn
= gen_tls_tprel_lo_64 (dest
, tmp
, addr
);
3486 insn
= gen_tls_tprel_lo_32 (dest
, tmp
, addr
);
3491 rtx r3
, got
, tga
, tmp1
, tmp2
, eqv
;
3494 got
= gen_rtx_REG (Pmode
, TOC_REGISTER
);
3498 got
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
3501 rtx gsym
= rs6000_got_sym ();
3502 got
= gen_reg_rtx (Pmode
);
3504 rs6000_emit_move (got
, gsym
, Pmode
);
3508 static int tls_got_labelno
= 0;
3509 rtx tempLR
, lab
, tmp3
, mem
;
3512 ASM_GENERATE_INTERNAL_LABEL (buf
, "LTLS", tls_got_labelno
++);
3513 lab
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
3514 tempLR
= gen_reg_rtx (Pmode
);
3515 tmp1
= gen_reg_rtx (Pmode
);
3516 tmp2
= gen_reg_rtx (Pmode
);
3517 tmp3
= gen_reg_rtx (Pmode
);
3518 mem
= gen_const_mem (Pmode
, tmp1
);
3520 first
= emit_insn (gen_load_toc_v4_PIC_1b (tempLR
, lab
,
3522 emit_move_insn (tmp1
, tempLR
);
3523 emit_move_insn (tmp2
, mem
);
3524 emit_insn (gen_addsi3 (tmp3
, tmp1
, tmp2
));
3525 last
= emit_move_insn (got
, tmp3
);
3526 REG_NOTES (last
) = gen_rtx_EXPR_LIST (REG_EQUAL
, gsym
,
3528 REG_NOTES (first
) = gen_rtx_INSN_LIST (REG_LIBCALL
, last
,
3530 REG_NOTES (last
) = gen_rtx_INSN_LIST (REG_RETVAL
, first
,
3536 if (model
== TLS_MODEL_GLOBAL_DYNAMIC
)
3538 r3
= gen_rtx_REG (Pmode
, 3);
3540 insn
= gen_tls_gd_64 (r3
, got
, addr
);
3542 insn
= gen_tls_gd_32 (r3
, got
, addr
);
3545 tga
= gen_rtx_MEM (Pmode
, rs6000_tls_get_addr ());
3546 insn
= gen_call_value (r3
, tga
, const0_rtx
, const0_rtx
);
3547 insn
= emit_call_insn (insn
);
3548 CONST_OR_PURE_CALL_P (insn
) = 1;
3549 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), r3
);
3550 insn
= get_insns ();
3552 emit_libcall_block (insn
, dest
, r3
, addr
);
3554 else if (model
== TLS_MODEL_LOCAL_DYNAMIC
)
3556 r3
= gen_rtx_REG (Pmode
, 3);
3558 insn
= gen_tls_ld_64 (r3
, got
);
3560 insn
= gen_tls_ld_32 (r3
, got
);
3563 tga
= gen_rtx_MEM (Pmode
, rs6000_tls_get_addr ());
3564 insn
= gen_call_value (r3
, tga
, const0_rtx
, const0_rtx
);
3565 insn
= emit_call_insn (insn
);
3566 CONST_OR_PURE_CALL_P (insn
) = 1;
3567 use_reg (&CALL_INSN_FUNCTION_USAGE (insn
), r3
);
3568 insn
= get_insns ();
3570 tmp1
= gen_reg_rtx (Pmode
);
3571 eqv
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
),
3573 emit_libcall_block (insn
, tmp1
, r3
, eqv
);
3574 if (rs6000_tls_size
== 16)
3577 insn
= gen_tls_dtprel_64 (dest
, tmp1
, addr
);
3579 insn
= gen_tls_dtprel_32 (dest
, tmp1
, addr
);
3581 else if (rs6000_tls_size
== 32)
3583 tmp2
= gen_reg_rtx (Pmode
);
3585 insn
= gen_tls_dtprel_ha_64 (tmp2
, tmp1
, addr
);
3587 insn
= gen_tls_dtprel_ha_32 (tmp2
, tmp1
, addr
);
3590 insn
= gen_tls_dtprel_lo_64 (dest
, tmp2
, addr
);
3592 insn
= gen_tls_dtprel_lo_32 (dest
, tmp2
, addr
);
3596 tmp2
= gen_reg_rtx (Pmode
);
3598 insn
= gen_tls_got_dtprel_64 (tmp2
, got
, addr
);
3600 insn
= gen_tls_got_dtprel_32 (tmp2
, got
, addr
);
3602 insn
= gen_rtx_SET (Pmode
, dest
,
3603 gen_rtx_PLUS (Pmode
, tmp2
, tmp1
));
3609 /* IE, or 64 bit offset LE. */
3610 tmp2
= gen_reg_rtx (Pmode
);
3612 insn
= gen_tls_got_tprel_64 (tmp2
, got
, addr
);
3614 insn
= gen_tls_got_tprel_32 (tmp2
, got
, addr
);
3617 insn
= gen_tls_tls_64 (dest
, tmp2
, addr
);
3619 insn
= gen_tls_tls_32 (dest
, tmp2
, addr
);
3627 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
3628 instruction definitions. */
3631 rs6000_tls_symbol_ref (rtx x
, enum machine_mode mode ATTRIBUTE_UNUSED
)
3633 return RS6000_SYMBOL_REF_TLS_P (x
);
3636 /* Return 1 if X contains a thread-local symbol. */
3639 rs6000_tls_referenced_p (rtx x
)
3641 if (! TARGET_HAVE_TLS
)
3644 return for_each_rtx (&x
, &rs6000_tls_symbol_ref_1
, 0);
3647 /* Return 1 if *X is a thread-local symbol. This is the same as
3648 rs6000_tls_symbol_ref except for the type of the unused argument. */
3651 rs6000_tls_symbol_ref_1 (rtx
*x
, void *data ATTRIBUTE_UNUSED
)
3653 return RS6000_SYMBOL_REF_TLS_P (*x
);
3656 /* The convention appears to be to define this wherever it is used.
3657 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3658 is now used here. */
3659 #ifndef REG_MODE_OK_FOR_BASE_P
3660 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3663 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3664 replace the input X, or the original X if no replacement is called for.
3665 The output parameter *WIN is 1 if the calling macro should goto WIN,
3668 For RS/6000, we wish to handle large displacements off a base
3669 register by splitting the addend across an addiu/addis and the mem insn.
3670 This cuts number of extra insns needed from 3 to 1.
3672 On Darwin, we use this to generate code for floating point constants.
3673 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3674 The Darwin code is inside #if TARGET_MACHO because only then is
3675 machopic_function_base_name() defined. */
3677 rs6000_legitimize_reload_address (rtx x
, enum machine_mode mode
,
3678 int opnum
, int type
, int ind_levels ATTRIBUTE_UNUSED
, int *win
)
3680 /* We must recognize output that we have already generated ourselves. */
3681 if (GET_CODE (x
) == PLUS
3682 && GET_CODE (XEXP (x
, 0)) == PLUS
3683 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
3684 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3685 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3687 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
3688 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
3689 opnum
, (enum reload_type
)type
);
3695 if (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
3696 && GET_CODE (x
) == LO_SUM
3697 && GET_CODE (XEXP (x
, 0)) == PLUS
3698 && XEXP (XEXP (x
, 0), 0) == pic_offset_table_rtx
3699 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == HIGH
3700 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 1), 0)) == CONST
3701 && XEXP (XEXP (XEXP (x
, 0), 1), 0) == XEXP (x
, 1)
3702 && GET_CODE (XEXP (XEXP (x
, 1), 0)) == MINUS
3703 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 0)) == SYMBOL_REF
3704 && GET_CODE (XEXP (XEXP (XEXP (x
, 1), 0), 1)) == SYMBOL_REF
)
3706 /* Result of previous invocation of this function on Darwin
3707 floating point constant. */
3708 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
3709 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
3710 opnum
, (enum reload_type
)type
);
3715 if (GET_CODE (x
) == PLUS
3716 && GET_CODE (XEXP (x
, 0)) == REG
3717 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
3718 && REG_MODE_OK_FOR_BASE_P (XEXP (x
, 0), mode
)
3719 && GET_CODE (XEXP (x
, 1)) == CONST_INT
3720 && !SPE_VECTOR_MODE (mode
)
3721 && !ALTIVEC_VECTOR_MODE (mode
))
3723 HOST_WIDE_INT val
= INTVAL (XEXP (x
, 1));
3724 HOST_WIDE_INT low
= ((val
& 0xffff) ^ 0x8000) - 0x8000;
3726 = (((val
- low
) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3728 /* Check for 32-bit overflow. */
3729 if (high
+ low
!= val
)
3735 /* Reload the high part into a base reg; leave the low part
3736 in the mem directly. */
3738 x
= gen_rtx_PLUS (GET_MODE (x
),
3739 gen_rtx_PLUS (GET_MODE (x
), XEXP (x
, 0),
3743 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
3744 BASE_REG_CLASS
, GET_MODE (x
), VOIDmode
, 0, 0,
3745 opnum
, (enum reload_type
)type
);
3750 if (GET_CODE (x
) == SYMBOL_REF
3751 && DEFAULT_ABI
== ABI_DARWIN
3752 && !ALTIVEC_VECTOR_MODE (mode
)
3753 && (flag_pic
|| MACHO_DYNAMIC_NO_PIC_P
)
3754 /* Don't do this for TFmode, since the result isn't offsettable. */
3759 rtx offset
= gen_rtx_CONST (Pmode
,
3760 gen_rtx_MINUS (Pmode
, x
,
3761 machopic_function_base_sym ()));
3762 x
= gen_rtx_LO_SUM (GET_MODE (x
),
3763 gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
,
3764 gen_rtx_HIGH (Pmode
, offset
)), offset
);
3767 x
= gen_rtx_LO_SUM (GET_MODE (x
),
3768 gen_rtx_HIGH (Pmode
, x
), x
);
3770 push_reload (XEXP (x
, 0), NULL_RTX
, &XEXP (x
, 0), NULL
,
3771 BASE_REG_CLASS
, Pmode
, VOIDmode
, 0, 0,
3772 opnum
, (enum reload_type
)type
);
3778 && constant_pool_expr_p (x
)
3779 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x
), mode
))
3781 (x
) = create_TOC_reference (x
);
3789 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3790 that is a valid memory address for an instruction.
3791 The MODE argument is the machine mode for the MEM expression
3792 that wants to use this address.
3794 On the RS/6000, there are four valid address: a SYMBOL_REF that
3795 refers to a constant pool entry of an address (or the sum of it
3796 plus a constant), a short (16-bit signed) constant plus a register,
3797 the sum of two registers, or a register indirect, possibly with an
3798 auto-increment. For DFmode and DImode with a constant plus register,
3799 we must ensure that both words are addressable or PowerPC64 with offset
3802 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3803 32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
3804 adjacent memory cells are accessed by adding word-sized offsets
3805 during assembly output. */
3807 rs6000_legitimate_address (enum machine_mode mode
, rtx x
, int reg_ok_strict
)
3809 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
3811 && ALTIVEC_VECTOR_MODE (mode
)
3812 && GET_CODE (x
) == AND
3813 && GET_CODE (XEXP (x
, 1)) == CONST_INT
3814 && INTVAL (XEXP (x
, 1)) == -16)
3817 if (RS6000_SYMBOL_REF_TLS_P (x
))
3819 if (legitimate_indirect_address_p (x
, reg_ok_strict
))
3821 if ((GET_CODE (x
) == PRE_INC
|| GET_CODE (x
) == PRE_DEC
)
3822 && !ALTIVEC_VECTOR_MODE (mode
)
3823 && !SPE_VECTOR_MODE (mode
)
3825 && legitimate_indirect_address_p (XEXP (x
, 0), reg_ok_strict
))
3827 if (legitimate_small_data_p (mode
, x
))
3829 if (legitimate_constant_pool_address_p (x
))
3831 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3833 && GET_CODE (x
) == PLUS
3834 && GET_CODE (XEXP (x
, 0)) == REG
3835 && (XEXP (x
, 0) == virtual_stack_vars_rtx
3836 || XEXP (x
, 0) == arg_pointer_rtx
)
3837 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3839 if (rs6000_legitimate_offset_address_p (mode
, x
, reg_ok_strict
))
3843 && ((TARGET_HARD_FLOAT
&& TARGET_FPRS
)
3845 || (mode
!= DFmode
&& mode
!= TFmode
))
3846 && (TARGET_POWERPC64
|| mode
!= DImode
)
3847 && legitimate_indexed_address_p (x
, reg_ok_strict
))
3849 if (legitimate_lo_sum_address_p (mode
, x
, reg_ok_strict
))
3854 /* Go to LABEL if ADDR (a legitimate address expression)
3855 has an effect that depends on the machine mode it is used for.
3857 On the RS/6000 this is true of all integral offsets (since AltiVec
3858 modes don't allow them) or is a pre-increment or decrement.
3860 ??? Except that due to conceptual problems in offsettable_address_p
3861 we can't really report the problems of integral offsets. So leave
3862 this assuming that the adjustable offset must be valid for the
3863 sub-words of a TFmode operand, which is what we had before. */
3866 rs6000_mode_dependent_address (rtx addr
)
3868 switch (GET_CODE (addr
))
3871 if (GET_CODE (XEXP (addr
, 1)) == CONST_INT
)
3873 unsigned HOST_WIDE_INT val
= INTVAL (XEXP (addr
, 1));
3874 return val
+ 12 + 0x8000 >= 0x10000;
3883 return TARGET_UPDATE
;
3892 /* Return number of consecutive hard regs needed starting at reg REGNO
3893 to hold something of mode MODE.
3894 This is ordinarily the length in words of a value of mode MODE
3895 but can be less for certain modes in special long registers.
3897 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3898 scalar instructions. The upper 32 bits are only available to the
3901 POWER and PowerPC GPRs hold 32 bits worth;
3902 PowerPC64 GPRs and FPRs point register holds 64 bits worth. */
3905 rs6000_hard_regno_nregs (int regno
, enum machine_mode mode
)
3907 if (FP_REGNO_P (regno
))
3908 return (GET_MODE_SIZE (mode
) + UNITS_PER_FP_WORD
- 1) / UNITS_PER_FP_WORD
;
3910 if (SPE_SIMD_REGNO_P (regno
) && TARGET_SPE
&& SPE_VECTOR_MODE (mode
))
3911 return (GET_MODE_SIZE (mode
) + UNITS_PER_SPE_WORD
- 1) / UNITS_PER_SPE_WORD
;
3913 if (ALTIVEC_REGNO_P (regno
))
3915 (GET_MODE_SIZE (mode
) + UNITS_PER_ALTIVEC_WORD
- 1) / UNITS_PER_ALTIVEC_WORD
;
3917 return (GET_MODE_SIZE (mode
) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
3920 /* Change register usage conditional on target flags. */
3922 rs6000_conditional_register_usage (void)
3926 /* Set MQ register fixed (already call_used) if not POWER
3927 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
3932 /* 64-bit AIX reserves GPR13 for thread-private data. */
3934 fixed_regs
[13] = call_used_regs
[13]
3935 = call_really_used_regs
[13] = 1;
3937 /* Conditionally disable FPRs. */
3938 if (TARGET_SOFT_FLOAT
|| !TARGET_FPRS
)
3939 for (i
= 32; i
< 64; i
++)
3940 fixed_regs
[i
] = call_used_regs
[i
]
3941 = call_really_used_regs
[i
] = 1;
3943 if (DEFAULT_ABI
== ABI_V4
3944 && PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
3946 fixed_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
3948 if (DEFAULT_ABI
== ABI_V4
3949 && PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
3951 fixed_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
]
3952 = call_used_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
]
3953 = call_really_used_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
3955 if (DEFAULT_ABI
== ABI_DARWIN
3956 && PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
)
3957 global_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
]
3958 = fixed_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
]
3959 = call_used_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
]
3960 = call_really_used_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
3962 if (TARGET_TOC
&& TARGET_MINIMAL_TOC
)
3963 fixed_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
]
3964 = call_used_regs
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
3967 global_regs
[VSCR_REGNO
] = 1;
3971 global_regs
[SPEFSCR_REGNO
] = 1;
3972 fixed_regs
[FIXED_SCRATCH
]
3973 = call_used_regs
[FIXED_SCRATCH
]
3974 = call_really_used_regs
[FIXED_SCRATCH
] = 1;
3977 if (! TARGET_ALTIVEC
)
3979 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
3980 fixed_regs
[i
] = call_used_regs
[i
] = call_really_used_regs
[i
] = 1;
3981 call_really_used_regs
[VRSAVE_REGNO
] = 1;
3984 if (TARGET_ALTIVEC_ABI
)
3985 for (i
= FIRST_ALTIVEC_REGNO
; i
< FIRST_ALTIVEC_REGNO
+ 20; ++i
)
3986 call_used_regs
[i
] = call_really_used_regs
[i
] = 1;
3989 /* Try to output insns to set TARGET equal to the constant C if it can
3990 be done in less than N insns. Do all computations in MODE.
3991 Returns the place where the output has been placed if it can be
3992 done and the insns have been emitted. If it would take more than N
3993 insns, zero is returned and no insns and emitted. */
3996 rs6000_emit_set_const (rtx dest
, enum machine_mode mode
,
3997 rtx source
, int n ATTRIBUTE_UNUSED
)
3999 rtx result
, insn
, set
;
4000 HOST_WIDE_INT c0
, c1
;
4002 if (mode
== QImode
|| mode
== HImode
)
4005 dest
= gen_reg_rtx (mode
);
4006 emit_insn (gen_rtx_SET (VOIDmode
, dest
, source
));
4009 else if (mode
== SImode
)
4011 result
= no_new_pseudos
? dest
: gen_reg_rtx (SImode
);
4013 emit_insn (gen_rtx_SET (VOIDmode
, result
,
4014 GEN_INT (INTVAL (source
)
4015 & (~ (HOST_WIDE_INT
) 0xffff))));
4016 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
4017 gen_rtx_IOR (SImode
, result
,
4018 GEN_INT (INTVAL (source
) & 0xffff))));
4021 else if (mode
== DImode
)
4023 if (GET_CODE (source
) == CONST_INT
)
4025 c0
= INTVAL (source
);
4028 else if (GET_CODE (source
) == CONST_DOUBLE
)
4030 #if HOST_BITS_PER_WIDE_INT >= 64
4031 c0
= CONST_DOUBLE_LOW (source
);
4034 c0
= CONST_DOUBLE_LOW (source
);
4035 c1
= CONST_DOUBLE_HIGH (source
);
4041 result
= rs6000_emit_set_long_const (dest
, c0
, c1
);
4046 insn
= get_last_insn ();
4047 set
= single_set (insn
);
4048 if (! CONSTANT_P (SET_SRC (set
)))
4049 set_unique_reg_note (insn
, REG_EQUAL
, source
);
4054 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
4055 fall back to a straight forward decomposition. We do this to avoid
4056 exponential run times encountered when looking for longer sequences
4057 with rs6000_emit_set_const. */
4059 rs6000_emit_set_long_const (rtx dest
, HOST_WIDE_INT c1
, HOST_WIDE_INT c2
)
4061 if (!TARGET_POWERPC64
)
4063 rtx operand1
, operand2
;
4065 operand1
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
== 0,
4067 operand2
= operand_subword_force (dest
, WORDS_BIG_ENDIAN
!= 0,
4069 emit_move_insn (operand1
, GEN_INT (c1
));
4070 emit_move_insn (operand2
, GEN_INT (c2
));
4074 HOST_WIDE_INT ud1
, ud2
, ud3
, ud4
;
4077 ud2
= (c1
& 0xffff0000) >> 16;
4078 #if HOST_BITS_PER_WIDE_INT >= 64
4082 ud4
= (c2
& 0xffff0000) >> 16;
4084 if ((ud4
== 0xffff && ud3
== 0xffff && ud2
== 0xffff && (ud1
& 0x8000))
4085 || (ud4
== 0 && ud3
== 0 && ud2
== 0 && ! (ud1
& 0x8000)))
4088 emit_move_insn (dest
, GEN_INT (((ud1
^ 0x8000) - 0x8000)));
4090 emit_move_insn (dest
, GEN_INT (ud1
));
4093 else if ((ud4
== 0xffff && ud3
== 0xffff && (ud2
& 0x8000))
4094 || (ud4
== 0 && ud3
== 0 && ! (ud2
& 0x8000)))
4097 emit_move_insn (dest
, GEN_INT (((ud2
<< 16) ^ 0x80000000)
4100 emit_move_insn (dest
, GEN_INT (ud2
<< 16));
4102 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
4104 else if ((ud4
== 0xffff && (ud3
& 0x8000))
4105 || (ud4
== 0 && ! (ud3
& 0x8000)))
4108 emit_move_insn (dest
, GEN_INT (((ud3
<< 16) ^ 0x80000000)
4111 emit_move_insn (dest
, GEN_INT (ud3
<< 16));
4114 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud2
)));
4115 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (16)));
4117 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
4122 emit_move_insn (dest
, GEN_INT (((ud4
<< 16) ^ 0x80000000)
4125 emit_move_insn (dest
, GEN_INT (ud4
<< 16));
4128 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud3
)));
4130 emit_move_insn (dest
, gen_rtx_ASHIFT (DImode
, dest
, GEN_INT (32)));
4132 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
,
4133 GEN_INT (ud2
<< 16)));
4135 emit_move_insn (dest
, gen_rtx_IOR (DImode
, dest
, GEN_INT (ud1
)));
4141 /* Helper for the following. Get rid of [r+r] memory refs
4142 in cases where it won't work (TImode, TFmode). */
4145 rs6000_eliminate_indexed_memrefs (rtx operands
[2])
4147 if (GET_CODE (operands
[0]) == MEM
4148 && GET_CODE (XEXP (operands
[0], 0)) != REG
4149 && ! reload_in_progress
)
4151 = replace_equiv_address (operands
[0],
4152 copy_addr_to_reg (XEXP (operands
[0], 0)));
4154 if (GET_CODE (operands
[1]) == MEM
4155 && GET_CODE (XEXP (operands
[1], 0)) != REG
4156 && ! reload_in_progress
)
4158 = replace_equiv_address (operands
[1],
4159 copy_addr_to_reg (XEXP (operands
[1], 0)));
4162 /* Emit a move from SOURCE to DEST in mode MODE. */
4164 rs6000_emit_move (rtx dest
, rtx source
, enum machine_mode mode
)
4168 operands
[1] = source
;
4170 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
4171 if (GET_CODE (operands
[1]) == CONST_DOUBLE
4172 && ! FLOAT_MODE_P (mode
)
4173 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
4175 /* FIXME. This should never happen. */
4176 /* Since it seems that it does, do the safe thing and convert
4178 operands
[1] = gen_int_mode (CONST_DOUBLE_LOW (operands
[1]), mode
);
4180 if (GET_CODE (operands
[1]) == CONST_DOUBLE
4181 && ! FLOAT_MODE_P (mode
)
4182 && ((CONST_DOUBLE_HIGH (operands
[1]) == 0
4183 && CONST_DOUBLE_LOW (operands
[1]) >= 0)
4184 || (CONST_DOUBLE_HIGH (operands
[1]) == -1
4185 && CONST_DOUBLE_LOW (operands
[1]) < 0)))
4188 /* Check if GCC is setting up a block move that will end up using FP
4189 registers as temporaries. We must make sure this is acceptable. */
4190 if (GET_CODE (operands
[0]) == MEM
4191 && GET_CODE (operands
[1]) == MEM
4193 && (SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[0]))
4194 || SLOW_UNALIGNED_ACCESS (DImode
, MEM_ALIGN (operands
[1])))
4195 && ! (SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[0]) > 32
4196 ? 32 : MEM_ALIGN (operands
[0])))
4197 || SLOW_UNALIGNED_ACCESS (SImode
, (MEM_ALIGN (operands
[1]) > 32
4199 : MEM_ALIGN (operands
[1]))))
4200 && ! MEM_VOLATILE_P (operands
[0])
4201 && ! MEM_VOLATILE_P (operands
[1]))
4203 emit_move_insn (adjust_address (operands
[0], SImode
, 0),
4204 adjust_address (operands
[1], SImode
, 0));
4205 emit_move_insn (adjust_address (operands
[0], SImode
, 4),
4206 adjust_address (operands
[1], SImode
, 4));
4210 if (!no_new_pseudos
)
4212 if (GET_CODE (operands
[1]) == MEM
&& optimize
> 0
4213 && (mode
== QImode
|| mode
== HImode
|| mode
== SImode
)
4214 && GET_MODE_SIZE (mode
) < GET_MODE_SIZE (word_mode
))
4216 rtx reg
= gen_reg_rtx (word_mode
);
4218 emit_insn (gen_rtx_SET (word_mode
, reg
,
4219 gen_rtx_ZERO_EXTEND (word_mode
,
4221 operands
[1] = gen_lowpart (mode
, reg
);
4223 if (GET_CODE (operands
[0]) != REG
)
4224 operands
[1] = force_reg (mode
, operands
[1]);
4227 if (mode
== SFmode
&& ! TARGET_POWERPC
4228 && TARGET_HARD_FLOAT
&& TARGET_FPRS
4229 && GET_CODE (operands
[0]) == MEM
)
4233 if (reload_in_progress
|| reload_completed
)
4234 regnum
= true_regnum (operands
[1]);
4235 else if (GET_CODE (operands
[1]) == REG
)
4236 regnum
= REGNO (operands
[1]);
4240 /* If operands[1] is a register, on POWER it may have
4241 double-precision data in it, so truncate it to single
4243 if (FP_REGNO_P (regnum
) || regnum
>= FIRST_PSEUDO_REGISTER
)
4246 newreg
= (no_new_pseudos
? operands
[1] : gen_reg_rtx (mode
));
4247 emit_insn (gen_aux_truncdfsf2 (newreg
, operands
[1]));
4248 operands
[1] = newreg
;
4252 /* Recognize the case where operand[1] is a reference to thread-local
4253 data and load its address to a register. */
4254 if (GET_CODE (operands
[1]) == SYMBOL_REF
)
4256 enum tls_model model
= SYMBOL_REF_TLS_MODEL (operands
[1]);
4258 operands
[1] = rs6000_legitimize_tls_address (operands
[1], model
);
4261 /* Handle the case where reload calls us with an invalid address. */
4262 if (reload_in_progress
&& mode
== Pmode
4263 && (! general_operand (operands
[1], mode
)
4264 || ! nonimmediate_operand (operands
[0], mode
)))
4267 /* 128-bit constant floating-point values on Darwin should really be
4268 loaded as two parts. */
4269 if ((DEFAULT_ABI
== ABI_AIX
|| DEFAULT_ABI
== ABI_DARWIN
)
4270 && TARGET_HARD_FLOAT
&& TARGET_FPRS
&& TARGET_LONG_DOUBLE_128
4271 && mode
== TFmode
&& GET_CODE (operands
[1]) == CONST_DOUBLE
)
4273 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
4274 know how to get a DFmode SUBREG of a TFmode. */
4275 rs6000_emit_move (simplify_gen_subreg (DImode
, operands
[0], mode
, 0),
4276 simplify_gen_subreg (DImode
, operands
[1], mode
, 0),
4278 rs6000_emit_move (simplify_gen_subreg (DImode
, operands
[0], mode
,
4279 GET_MODE_SIZE (DImode
)),
4280 simplify_gen_subreg (DImode
, operands
[1], mode
,
4281 GET_MODE_SIZE (DImode
)),
4286 /* FIXME: In the long term, this switch statement should go away
4287 and be replaced by a sequence of tests based on things like
4293 if (CONSTANT_P (operands
[1])
4294 && GET_CODE (operands
[1]) != CONST_INT
)
4295 operands
[1] = force_const_mem (mode
, operands
[1]);
4299 rs6000_eliminate_indexed_memrefs (operands
);
4304 if (CONSTANT_P (operands
[1])
4305 && ! easy_fp_constant (operands
[1], mode
))
4306 operands
[1] = force_const_mem (mode
, operands
[1]);
4317 if (CONSTANT_P (operands
[1])
4318 && !easy_vector_constant (operands
[1], mode
))
4319 operands
[1] = force_const_mem (mode
, operands
[1]);
4324 /* Use default pattern for address of ELF small data */
4327 && DEFAULT_ABI
== ABI_V4
4328 && (GET_CODE (operands
[1]) == SYMBOL_REF
4329 || GET_CODE (operands
[1]) == CONST
)
4330 && small_data_operand (operands
[1], mode
))
4332 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
4336 if (DEFAULT_ABI
== ABI_V4
4337 && mode
== Pmode
&& mode
== SImode
4338 && flag_pic
== 1 && got_operand (operands
[1], mode
))
4340 emit_insn (gen_movsi_got (operands
[0], operands
[1]));
4344 if ((TARGET_ELF
|| DEFAULT_ABI
== ABI_DARWIN
)
4348 && CONSTANT_P (operands
[1])
4349 && GET_CODE (operands
[1]) != HIGH
4350 && GET_CODE (operands
[1]) != CONST_INT
)
4352 rtx target
= (no_new_pseudos
? operands
[0] : gen_reg_rtx (mode
));
4354 /* If this is a function address on -mcall-aixdesc,
4355 convert it to the address of the descriptor. */
4356 if (DEFAULT_ABI
== ABI_AIX
4357 && GET_CODE (operands
[1]) == SYMBOL_REF
4358 && XSTR (operands
[1], 0)[0] == '.')
4360 const char *name
= XSTR (operands
[1], 0);
4362 while (*name
== '.')
4364 new_ref
= gen_rtx_SYMBOL_REF (Pmode
, name
);
4365 CONSTANT_POOL_ADDRESS_P (new_ref
)
4366 = CONSTANT_POOL_ADDRESS_P (operands
[1]);
4367 SYMBOL_REF_FLAGS (new_ref
) = SYMBOL_REF_FLAGS (operands
[1]);
4368 SYMBOL_REF_USED (new_ref
) = SYMBOL_REF_USED (operands
[1]);
4369 SYMBOL_REF_DECL (new_ref
) = SYMBOL_REF_DECL (operands
[1]);
4370 operands
[1] = new_ref
;
4373 if (DEFAULT_ABI
== ABI_DARWIN
)
4376 if (MACHO_DYNAMIC_NO_PIC_P
)
4378 /* Take care of any required data indirection. */
4379 operands
[1] = rs6000_machopic_legitimize_pic_address (
4380 operands
[1], mode
, operands
[0]);
4381 if (operands
[0] != operands
[1])
4382 emit_insn (gen_rtx_SET (VOIDmode
,
4383 operands
[0], operands
[1]));
4389 emit_insn (gen_macho_high_di (target
, operands
[1]));
4390 emit_insn (gen_macho_low_di (operands
[0], target
, operands
[1]));
4394 emit_insn (gen_macho_high (target
, operands
[1]));
4395 emit_insn (gen_macho_low (operands
[0], target
, operands
[1]));
4400 emit_insn (gen_elf_high (target
, operands
[1]));
4401 emit_insn (gen_elf_low (operands
[0], target
, operands
[1]));
4405 /* If this is a SYMBOL_REF that refers to a constant pool entry,
4406 and we have put it in the TOC, we just need to make a TOC-relative
4409 && GET_CODE (operands
[1]) == SYMBOL_REF
4410 && constant_pool_expr_p (operands
[1])
4411 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands
[1]),
4412 get_pool_mode (operands
[1])))
4414 operands
[1] = create_TOC_reference (operands
[1]);
4416 else if (mode
== Pmode
4417 && CONSTANT_P (operands
[1])
4418 && ((GET_CODE (operands
[1]) != CONST_INT
4419 && ! easy_fp_constant (operands
[1], mode
))
4420 || (GET_CODE (operands
[1]) == CONST_INT
4421 && num_insns_constant (operands
[1], mode
) > 2)
4422 || (GET_CODE (operands
[0]) == REG
4423 && FP_REGNO_P (REGNO (operands
[0]))))
4424 && GET_CODE (operands
[1]) != HIGH
4425 && ! legitimate_constant_pool_address_p (operands
[1])
4426 && ! toc_relative_expr_p (operands
[1]))
4428 /* Emit a USE operation so that the constant isn't deleted if
4429 expensive optimizations are turned on because nobody
4430 references it. This should only be done for operands that
4431 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
4432 This should not be done for operands that contain LABEL_REFs.
4433 For now, we just handle the obvious case. */
4434 if (GET_CODE (operands
[1]) != LABEL_REF
)
4435 emit_insn (gen_rtx_USE (VOIDmode
, operands
[1]));
4438 /* Darwin uses a special PIC legitimizer. */
4439 if (DEFAULT_ABI
== ABI_DARWIN
&& MACHOPIC_INDIRECT
)
4442 rs6000_machopic_legitimize_pic_address (operands
[1], mode
,
4444 if (operands
[0] != operands
[1])
4445 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
4450 /* If we are to limit the number of things we put in the TOC and
4451 this is a symbol plus a constant we can add in one insn,
4452 just put the symbol in the TOC and add the constant. Don't do
4453 this if reload is in progress. */
4454 if (GET_CODE (operands
[1]) == CONST
4455 && TARGET_NO_SUM_IN_TOC
&& ! reload_in_progress
4456 && GET_CODE (XEXP (operands
[1], 0)) == PLUS
4457 && add_operand (XEXP (XEXP (operands
[1], 0), 1), mode
)
4458 && (GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == LABEL_REF
4459 || GET_CODE (XEXP (XEXP (operands
[1], 0), 0)) == SYMBOL_REF
)
4460 && ! side_effects_p (operands
[0]))
4463 force_const_mem (mode
, XEXP (XEXP (operands
[1], 0), 0));
4464 rtx other
= XEXP (XEXP (operands
[1], 0), 1);
4466 sym
= force_reg (mode
, sym
);
4468 emit_insn (gen_addsi3 (operands
[0], sym
, other
));
4470 emit_insn (gen_adddi3 (operands
[0], sym
, other
));
4474 operands
[1] = force_const_mem (mode
, operands
[1]);
4477 && constant_pool_expr_p (XEXP (operands
[1], 0))
4478 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
4479 get_pool_constant (XEXP (operands
[1], 0)),
4480 get_pool_mode (XEXP (operands
[1], 0))))
4483 = gen_const_mem (mode
,
4484 create_TOC_reference (XEXP (operands
[1], 0)));
4485 set_mem_alias_set (operands
[1], get_TOC_alias_set ());
4491 rs6000_eliminate_indexed_memrefs (operands
);
4495 emit_insn (gen_rtx_PARALLEL (VOIDmode
,
4497 gen_rtx_SET (VOIDmode
,
4498 operands
[0], operands
[1]),
4499 gen_rtx_CLOBBER (VOIDmode
,
4500 gen_rtx_SCRATCH (SImode
)))));
4509 /* Above, we may have called force_const_mem which may have returned
4510 an invalid address. If we can, fix this up; otherwise, reload will
4511 have to deal with it. */
4512 if (GET_CODE (operands
[1]) == MEM
&& ! reload_in_progress
)
4513 operands
[1] = validize_mem (operands
[1]);
4516 emit_insn (gen_rtx_SET (VOIDmode
, operands
[0], operands
[1]));
/* Nonzero if we can use a floating-point register to pass this arg.  */
#define USE_FP_FOR_ARG_P(CUM,MODE,TYPE)		\
  (GET_MODE_CLASS (MODE) == MODE_FLOAT		\
   && (CUM)->fregno <= FP_ARG_MAX_REG		\
   && TARGET_HARD_FLOAT && TARGET_FPRS)

/* Nonzero if we can use an AltiVec register to pass this arg.
   NOTE(review): the final continuation line was lost in the corrupted
   source; restored as the NAMED test — verify against history.  */
#define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED)	\
  (ALTIVEC_VECTOR_MODE (MODE)				\
   && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG		\
   && TARGET_ALTIVEC_ABI				\
   && (NAMED))
/* NOTE(review): extraction-damaged text, preserved byte-for-byte below.
   Statements are split across lines and several original lines (the
   function's return type, braces, and `return' statements) are missing.
   Appears to implement TARGET_RETURN_IN_MEMORY: aggregates are returned
   in memory per TARGET_AIX_STRUCT_RET or when larger than 8 bytes, and
   V.4 TFmode values are also forced to memory (per the comment).
   TODO: restore from upstream rs6000.c.  */
4532 /* Return a nonzero value to say to return the function value in
4533 memory, just as large structures are always returned. TYPE will be
4534 the data type of the value, and FNTYPE will be the type of the
4535 function doing the returning, or @code{NULL} for libcalls.
4537 The AIX ABI for the RS/6000 specifies that all structures are
4538 returned in memory. The Darwin ABI does the same. The SVR4 ABI
4539 specifies that structures <= 8 bytes are returned in r3/r4, but a
4540 draft put them in memory, and GCC used to implement the draft
4541 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
4542 controls this instead of DEFAULT_ABI; V.4 targets needing backward
4543 compatibility can change DRAFT_V4_STRUCT_RET to override the
4544 default, and -m switches get the final word. See
4545 rs6000_override_options for more details.
4547 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
4548 long double support is enabled. These values are returned in memory.
4550 int_size_in_bytes returns -1 for variable size objects, which go in
4551 memory always. The cast to unsigned makes -1 > 8. */
4554 rs6000_return_in_memory (tree type
, tree fntype ATTRIBUTE_UNUSED
)
4556 if (AGGREGATE_TYPE_P (type
)
4557 && (TARGET_AIX_STRUCT_RET
4558 || (unsigned HOST_WIDE_INT
) int_size_in_bytes (type
) > 8))
4560 if (DEFAULT_ABI
== ABI_V4
&& TYPE_MODE (type
) == TFmode
)
/* NOTE(review): extraction-damaged text, preserved byte-for-byte below.
   Initializes a CUMULATIVE_ARGS record (zeroed, then fregno/vregno/
   sysv_gregno seeded with the first argument registers, prototype and
   stdarg flags derived from FNTYPE, longcall/shortcall attributes folded
   into call_cookie, plus TARGET_DEBUG_ARG tracing).  Several original
   lines (braces, an `if (fntype' guard before line 4598, parts of the
   altivec diagnostic condition near 4621) are missing.
   TODO: restore from upstream rs6000.c.  */
4565 /* Initialize a variable CUM of type CUMULATIVE_ARGS
4566 for a call to a function whose data type is FNTYPE.
4567 For a library call, FNTYPE is 0.
4569 For incoming args we set the number of arguments in the prototype large
4570 so we never return a PARALLEL. */
4573 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
,
4574 rtx libname ATTRIBUTE_UNUSED
, int incoming
,
4575 int libcall
, int n_named_args
)
4577 static CUMULATIVE_ARGS zero_cumulative
;
4579 *cum
= zero_cumulative
;
4581 cum
->fregno
= FP_ARG_MIN_REG
;
4582 cum
->vregno
= ALTIVEC_ARG_MIN_REG
;
4583 cum
->prototype
= (fntype
&& TYPE_ARG_TYPES (fntype
));
4584 cum
->call_cookie
= ((DEFAULT_ABI
== ABI_V4
&& libcall
)
4585 ? CALL_LIBCALL
: CALL_NORMAL
);
4586 cum
->sysv_gregno
= GP_ARG_MIN_REG
;
4587 cum
->stdarg
= fntype
4588 && (TYPE_ARG_TYPES (fntype
) != 0
4589 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
4590 != void_type_node
));
4592 cum
->nargs_prototype
= 0;
4593 if (incoming
|| cum
->prototype
)
4594 cum
->nargs_prototype
= n_named_args
;
4596 /* Check for a longcall attribute. */
4598 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype
))
4599 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype
)))
4600 cum
->call_cookie
= CALL_LONG
;
4602 if (TARGET_DEBUG_ARG
)
4604 fprintf (stderr
, "\ninit_cumulative_args:");
4607 tree ret_type
= TREE_TYPE (fntype
);
4608 fprintf (stderr
, " ret code = %s,",
4609 tree_code_name
[ (int)TREE_CODE (ret_type
) ]);
4612 if (cum
->call_cookie
& CALL_LONG
)
4613 fprintf (stderr
, " longcall,");
4615 fprintf (stderr
, " proto = %d, nargs = %d\n",
4616 cum
->prototype
, cum
->nargs_prototype
);
4621 && TARGET_ALTIVEC_ABI
4622 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype
))))
4624 error ("Cannot return value in vector register because"
4625 " altivec instructions are disabled, use -maltivec"
4626 " to enable them.");
/* NOTE(review): extraction-damaged text, preserved byte-for-byte below
   (return type and braces missing).  Dispatches between the two generic
   must-pass-in-stack helpers depending on ABI/word size.
   TODO: restore from upstream rs6000.c.  */
4630 /* Return true if TYPE must be passed on the stack and not in registers. */
4633 rs6000_must_pass_in_stack (enum machine_mode mode
, tree type
)
4635 if (DEFAULT_ABI
== ABI_AIX
|| TARGET_64BIT
)
4636 return must_pass_in_stack_var_size (mode
, type
);
4638 return must_pass_in_stack_var_size_or_pad (mode
, type
);
/* NOTE(review): extraction-damaged text, preserved byte-for-byte below.
   Implements FUNCTION_ARG_PADDING; the visible logic computes a byte
   size for the argument and special-cases small sizes on big-endian
   targets, falling back to DEFAULT_FUNCTION_ARG_PADDING.  Missing
   original lines include `#endif's, braces, and the `return upward /
   downward' statements.  TODO: restore from upstream rs6000.c.  */
4641 /* If defined, a C expression which determines whether, and in which
4642 direction, to pad out an argument with extra space. The value
4643 should be of type `enum direction': either `upward' to pad above
4644 the argument, `downward' to pad below, or `none' to inhibit
4647 For the AIX ABI structs are always stored left shifted in their
4651 function_arg_padding (enum machine_mode mode
, tree type
)
4653 #ifndef AGGREGATE_PADDING_FIXED
4654 #define AGGREGATE_PADDING_FIXED 0
4656 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
4657 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
4660 if (!AGGREGATE_PADDING_FIXED
)
4662 /* GCC used to pass structures of the same size as integer types as
4663 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
4664 i.e. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
4665 passed padded downward, except that -mstrict-align further
4666 muddied the water in that multi-component structures of 2 and 4
4667 bytes in size were passed padded upward.
4669 The following arranges for best compatibility with previous
4670 versions of gcc, but removes the -mstrict-align dependency. */
4671 if (BYTES_BIG_ENDIAN
)
4673 HOST_WIDE_INT size
= 0;
4675 if (mode
== BLKmode
)
4677 if (type
&& TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
)
4678 size
= int_size_in_bytes (type
);
4681 size
= GET_MODE_SIZE (mode
);
4683 if (size
== 1 || size
== 2 || size
== 4)
4689 if (AGGREGATES_PAD_UPWARD_ALWAYS
)
4691 if (type
!= 0 && AGGREGATE_TYPE_P (type
))
4695 /* Fall back to the default. */
4696 return DEFAULT_FUNCTION_ARG_PADDING (mode
, type
);
/* NOTE(review): extraction-damaged text, preserved byte-for-byte below.
   Implements FUNCTION_ARG_BOUNDARY; the `return' values for the V.4
   8-byte, SPE-vector, and AltiVec-vector branches are among the missing
   lines -- only the PARM_BOUNDARY fallback survives.
   TODO: restore from upstream rs6000.c.  */
4699 /* If defined, a C expression that gives the alignment boundary, in bits,
4700 of an argument with the specified mode and type. If it is not defined,
4701 PARM_BOUNDARY is used for all arguments.
4703 V.4 wants long longs to be double word aligned. */
4706 function_arg_boundary (enum machine_mode mode
, tree type ATTRIBUTE_UNUSED
)
4708 if (DEFAULT_ABI
== ABI_V4
&& GET_MODE_SIZE (mode
) == 8)
4710 else if (SPE_VECTOR_MODE (mode
))
4712 else if (ALTIVEC_VECTOR_MODE (mode
))
4715 return PARM_BOUNDARY
;
/* NOTE(review): extraction-damaged text, preserved byte-for-byte below.
   Rounds the argument's byte size up to 4-byte words (the >> 2 return)
   or, presumably in 64-bit mode, 8-byte words (the >> 3 return); the
   declaration of `size', braces, and the TARGET_32BIT test appear to be
   among the missing lines.  TODO: restore from upstream rs6000.c.  */
4718 /* Compute the size (in words) of a function argument. */
4720 static unsigned long
4721 rs6000_arg_size (enum machine_mode mode
, tree type
)
4725 if (mode
!= BLKmode
)
4726 size
= GET_MODE_SIZE (mode
);
4728 size
= int_size_in_bytes (type
);
4731 return (size
+ 3) >> 2;
4733 return (size
+ 7) >> 3;
/* NOTE(review): extraction-damaged text, preserved byte-for-byte below.
   Advances CUM past one argument: AltiVec vectors bump vregno (and, on
   AIX64 / non-V.4 stdarg, also reserve aligned GPR words); V.4 floats
   use fregno or fall to aligned stack words; other V.4 args consume
   sysv_gregno pairs; the default ABI path aligns and bumps cum->words
   and fregno.  Many original lines (braces, vregno updates, several
   fprintf arguments) are missing.  TODO: restore from upstream
   rs6000.c.  */
4736 /* Update the data in CUM to advance over an argument
4737 of mode MODE and data type TYPE.
4738 (TYPE is null for libcalls where that information may not be available.)
4740 Note that for args passed by reference, function_arg will be called
4741 with MODE and TYPE set to that of the pointer to the arg, not the arg
4745 function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
4746 tree type
, int named
)
4748 cum
->nargs_prototype
--;
4750 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
4754 if (USE_ALTIVEC_FOR_ARG_P (cum
, mode
, type
, named
))
4757 if (!TARGET_ALTIVEC
)
4758 error ("Cannot pass argument in vector register because"
4759 " altivec instructions are disabled, use -maltivec"
4760 " to enable them.");
4762 /* PowerPC64 Linux and AIX allocate GPRs for a vector argument
4763 even if it is going to be passed in a vector register.
4764 Darwin does the same for variable-argument functions. */
4765 if ((DEFAULT_ABI
== ABI_AIX
&& TARGET_64BIT
)
4766 || (cum
->stdarg
&& DEFAULT_ABI
!= ABI_V4
))
4776 /* Vector parameters must be 16-byte aligned. This places
4777 them at 2 mod 4 in terms of words in 32-bit mode, since
4778 the parameter save area starts at offset 24 from the
4779 stack. In 64-bit mode, they just have to start on an
4780 even word, since the parameter save area is 16-byte
4781 aligned. Space for GPRs is reserved even if the argument
4782 will be passed in memory. */
4784 align
= (2 - cum
->words
) & 3;
4786 align
= cum
->words
& 1;
4787 cum
->words
+= align
+ rs6000_arg_size (mode
, type
);
4789 if (TARGET_DEBUG_ARG
)
4791 fprintf (stderr
, "function_adv: words = %2d, align=%d, ",
4793 fprintf (stderr
, "nargs = %4d, proto = %d, mode = %4s\n",
4794 cum
->nargs_prototype
, cum
->prototype
,
4795 GET_MODE_NAME (mode
));
4799 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
)
4801 && cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
4803 else if (DEFAULT_ABI
== ABI_V4
)
4805 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
4806 && (mode
== SFmode
|| mode
== DFmode
))
4808 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
4813 cum
->words
+= cum
->words
& 1;
4814 cum
->words
+= rs6000_arg_size (mode
, type
);
4819 int n_words
= rs6000_arg_size (mode
, type
);
4820 int gregno
= cum
->sysv_gregno
;
4822 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
4823 (r7,r8) or (r9,r10). As does any other 2 word item such
4824 as complex int due to a historical mistake. */
4826 gregno
+= (1 - gregno
) & 1;
4828 /* Multi-reg args are not split between registers and stack. */
4829 if (gregno
+ n_words
- 1 > GP_ARG_MAX_REG
)
4831 /* Long long and SPE vectors are aligned on the stack.
4832 So are other 2 word items such as complex int due to
4833 a historical mistake. */
4835 cum
->words
+= cum
->words
& 1;
4836 cum
->words
+= n_words
;
4839 /* Note: continuing to accumulate gregno past when we've started
4840 spilling to the stack indicates the fact that we've started
4841 spilling to the stack to expand_builtin_saveregs. */
4842 cum
->sysv_gregno
= gregno
+ n_words
;
4845 if (TARGET_DEBUG_ARG
)
4847 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
4848 cum
->words
, cum
->fregno
);
4849 fprintf (stderr
, "gregno = %2d, nargs = %4d, proto = %d, ",
4850 cum
->sysv_gregno
, cum
->nargs_prototype
, cum
->prototype
);
4851 fprintf (stderr
, "mode = %4s, named = %d\n",
4852 GET_MODE_NAME (mode
), named
);
4857 int n_words
= rs6000_arg_size (mode
, type
);
4858 int align
= function_arg_boundary (mode
, type
) / PARM_BOUNDARY
- 1;
4860 /* The simple alignment calculation here works because
4861 function_arg_boundary / PARM_BOUNDARY will only be 1 or 2.
4862 If we ever want to handle alignments larger than 8 bytes for
4863 32-bit or 16 bytes for 64-bit, then we'll need to take into
4864 account the offset to the start of the parm save area. */
4865 align
&= cum
->words
;
4866 cum
->words
+= align
+ n_words
;
4868 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
4869 && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
4870 cum
->fregno
+= (GET_MODE_SIZE (mode
) + 7) >> 3;
4872 if (TARGET_DEBUG_ARG
)
4874 fprintf (stderr
, "function_adv: words = %2d, fregno = %2d, ",
4875 cum
->words
, cum
->fregno
);
4876 fprintf (stderr
, "nargs = %4d, proto = %d, mode = %4s, ",
4877 cum
->nargs_prototype
, cum
->prototype
, GET_MODE_NAME (mode
));
4878 fprintf (stderr
, "named = %d, align = %d\n", named
, align
);
/* NOTE(review): extraction-damaged text, preserved byte-for-byte below.
   Places an SPE vector argument: a two-word vector starting on an even
   GPR is described as a PARALLEL of two SImode pieces at offsets 0 and
   4; otherwise a single REG if a GPR remains.  The function signature's
   trailing parameter(s), braces, and some declarations (r1, r2) are
   missing.  TODO: restore from upstream rs6000.c.  */
4883 /* Determine where to put a SIMD argument on the SPE. */
4886 rs6000_spe_function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
4891 int gregno
= cum
->sysv_gregno
;
4892 int n_words
= rs6000_arg_size (mode
, type
);
4894 /* SPE vectors are put in odd registers. */
4895 if (n_words
== 2 && (gregno
& 1) == 0)
4898 if (gregno
+ n_words
- 1 <= GP_ARG_MAX_REG
)
4901 enum machine_mode m
= SImode
;
4903 r1
= gen_rtx_REG (m
, gregno
);
4904 r1
= gen_rtx_EXPR_LIST (m
, r1
, const0_rtx
);
4905 r2
= gen_rtx_REG (m
, gregno
+ 1);
4906 r2
= gen_rtx_EXPR_LIST (m
, r2
, GEN_INT (4));
4907 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r2
));
4914 if (cum
->sysv_gregno
<= GP_ARG_MAX_REG
)
4915 return gen_rtx_REG (mode
, cum
->sysv_gregno
);
/* NOTE(review): extraction-damaged text, preserved byte-for-byte below.
   Builds the rtx for an argument when compiling 64-bit code with the
   32-bit ABI (TARGET_32BIT && TARGET_POWERPC64): single-GPR args get a
   plain REG, multi-word args a PARALLEL of SImode pieces, with a magic
   NULL_RTX element when part of the arg spills to memory.  Missing
   lines include the return type, declarations of n_units/i/k, braces,
   and the early `return NULL_RTX'.  TODO: restore from upstream
   rs6000.c.  */
4921 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4924 rs6000_mixed_function_arg (enum machine_mode mode
, tree type
, int align_words
)
4928 rtx rvec
[GP_ARG_NUM_REG
+ 1];
4930 if (align_words
>= GP_ARG_NUM_REG
)
4933 n_units
= rs6000_arg_size (mode
, type
);
4935 /* Optimize the simple case where the arg fits in one gpr, except in
4936 the case of BLKmode due to assign_parms assuming that registers are
4937 BITS_PER_WORD wide. */
4939 || (n_units
== 1 && mode
!= BLKmode
))
4940 return gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
4943 if (align_words
+ n_units
> GP_ARG_NUM_REG
)
4944 /* Not all of the arg fits in gprs. Say that it goes in memory too,
4945 using a magic NULL_RTX component.
4946 FIXME: This is not strictly correct. Only some of the arg
4947 belongs in memory, not all of it. However, there isn't any way
4948 to do this currently, apart from building rtx descriptions for
4949 the pieces of memory we want stored. Due to bugs in the generic
4950 code we can't use the normal function_arg_partial_nregs scheme
4951 with the PARALLEL arg description we emit here.
4952 In any case, the code to store the whole arg to memory is often
4953 more efficient than code to store pieces, and we know that space
4954 is available in the right place for the whole arg. */
4955 rvec
[k
++] = gen_rtx_EXPR_LIST (VOIDmode
, NULL_RTX
, const0_rtx
);
4960 rtx r
= gen_rtx_REG (SImode
, GP_ARG_MIN_REG
+ align_words
);
4961 rtx off
= GEN_INT (i
++ * 4);
4962 rvec
[k
++] = gen_rtx_EXPR_LIST (VOIDmode
, r
, off
);
4964 while (++align_words
< GP_ARG_NUM_REG
&& --n_units
!= 0);
4966 return gen_rtx_PARALLEL (mode
, gen_rtvec_v (k
, rvec
));
/* NOTE(review): extraction-damaged text, preserved byte-for-byte below.
   Implements FUNCTION_ARG: VOIDmode yields the call-cookie marker for
   V.4 FP-args-in-regs tracking; AltiVec args go in VRs (possibly
   doubled into GPRs/memory absent a prototype); SPE vectors dispatch to
   rs6000_spe_function_arg; V.4 floats use FPRs and other V.4 args GPR
   pairs; the default ABI path may build a PARALLEL mixing GPR, FPR and
   memory pieces.  Many original lines (braces, `return NULL_RTX',
   declarations of r/off/i/k/needs_psave, parts of conditions) are
   missing.  TODO: restore from upstream rs6000.c.  */
4969 /* Determine where to put an argument to a function.
4970 Value is zero to push the argument on the stack,
4971 or a hard register in which to store the argument.
4973 MODE is the argument's machine mode.
4974 TYPE is the data type of the argument (as a tree).
4975 This is null for libcalls where that information may
4977 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4978 the preceding args and about the function being called.
4979 NAMED is nonzero if this argument is a named parameter
4980 (otherwise it is an extra parameter matching an ellipsis).
4982 On RS/6000 the first eight words of non-FP are normally in registers
4983 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4984 Under V.4, the first 8 FP args are in registers.
4986 If this is floating-point and no prototype is specified, we use
4987 both an FP and integer register (or possibly FP reg and stack). Library
4988 functions (when CALL_LIBCALL is set) always have the proper types for args,
4989 so we can pass the FP value just in one register. emit_library_function
4990 doesn't support PARALLEL anyway.
4992 Note that for args passed by reference, function_arg will be called
4993 with MODE and TYPE set to that of the pointer to the arg, not the arg
4997 function_arg (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
4998 tree type
, int named
)
5000 enum rs6000_abi abi
= DEFAULT_ABI
;
5002 /* Return a marker to indicate whether CR1 needs to set or clear the
5003 bit that V.4 uses to say fp args were passed in registers.
5004 Assume that we don't need the marker for software floating point,
5005 or compiler generated library calls. */
5006 if (mode
== VOIDmode
)
5009 && cum
->nargs_prototype
< 0
5010 && (cum
->call_cookie
& CALL_LIBCALL
) == 0
5011 && (cum
->prototype
|| TARGET_NO_PROTOTYPE
))
5013 /* For the SPE, we need to crxor CR6 always. */
5015 return GEN_INT (cum
->call_cookie
| CALL_V4_SET_FP_ARGS
);
5016 else if (TARGET_HARD_FLOAT
&& TARGET_FPRS
)
5017 return GEN_INT (cum
->call_cookie
5018 | ((cum
->fregno
== FP_ARG_MIN_REG
)
5019 ? CALL_V4_SET_FP_ARGS
5020 : CALL_V4_CLEAR_FP_ARGS
));
5023 return GEN_INT (cum
->call_cookie
);
5026 if (USE_ALTIVEC_FOR_ARG_P (cum
, mode
, type
, named
))
5027 if (TARGET_64BIT
&& ! cum
->prototype
)
5029 /* Vector parameters get passed in vector register
5030 and also in GPRs or memory, in absence of prototype. */
5033 align_words
= (cum
->words
+ 1) & ~1;
5035 if (align_words
>= GP_ARG_NUM_REG
)
5041 slot
= gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
5043 return gen_rtx_PARALLEL (mode
,
5045 gen_rtx_EXPR_LIST (VOIDmode
,
5047 gen_rtx_EXPR_LIST (VOIDmode
,
5048 gen_rtx_REG (mode
, cum
->vregno
),
5052 return gen_rtx_REG (mode
, cum
->vregno
);
5053 else if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
5055 if (named
|| abi
== ABI_V4
)
5059 /* Vector parameters to varargs functions under AIX or Darwin
5060 get passed in memory and possibly also in GPRs. */
5061 int align
, align_words
, n_words
;
5062 enum machine_mode part_mode
;
5064 /* Vector parameters must be 16-byte aligned. This places them at
5065 2 mod 4 in terms of words in 32-bit mode, since the parameter
5066 save area starts at offset 24 from the stack. In 64-bit mode,
5067 they just have to start on an even word, since the parameter
5068 save area is 16-byte aligned. */
5070 align
= (2 - cum
->words
) & 3;
5072 align
= cum
->words
& 1;
5073 align_words
= cum
->words
+ align
;
5075 /* Out of registers? Memory, then. */
5076 if (align_words
>= GP_ARG_NUM_REG
)
5079 if (TARGET_32BIT
&& TARGET_POWERPC64
)
5080 return rs6000_mixed_function_arg (mode
, type
, align_words
);
5082 /* The vector value goes in GPRs. Only the part of the
5083 value in GPRs is reported here. */
5085 n_words
= rs6000_arg_size (mode
, type
);
5086 if (align_words
+ n_words
> GP_ARG_NUM_REG
)
5087 /* Fortunately, there are only two possibilities, the value
5088 is either wholly in GPRs or half in GPRs and half not. */
5091 return gen_rtx_REG (part_mode
, GP_ARG_MIN_REG
+ align_words
);
5094 else if (TARGET_SPE_ABI
&& TARGET_SPE
&& SPE_VECTOR_MODE (mode
))
5095 return rs6000_spe_function_arg (cum
, mode
, type
);
5096 else if (abi
== ABI_V4
)
5098 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
5099 && (mode
== SFmode
|| mode
== DFmode
))
5101 if (cum
->fregno
<= FP_ARG_V4_MAX_REG
)
5102 return gen_rtx_REG (mode
, cum
->fregno
);
5108 int n_words
= rs6000_arg_size (mode
, type
);
5109 int gregno
= cum
->sysv_gregno
;
5111 /* Long long and SPE vectors are put in (r3,r4), (r5,r6),
5112 (r7,r8) or (r9,r10). As does any other 2 word item such
5113 as complex int due to a historical mistake. */
5115 gregno
+= (1 - gregno
) & 1;
5117 /* Multi-reg args are not split between registers and stack. */
5118 if (gregno
+ n_words
- 1 > GP_ARG_MAX_REG
)
5121 if (TARGET_32BIT
&& TARGET_POWERPC64
)
5122 return rs6000_mixed_function_arg (mode
, type
,
5123 gregno
- GP_ARG_MIN_REG
);
5124 return gen_rtx_REG (mode
, gregno
);
5129 int align
= function_arg_boundary (mode
, type
) / PARM_BOUNDARY
- 1;
5130 int align_words
= cum
->words
+ (cum
->words
& align
);
5132 if (USE_FP_FOR_ARG_P (cum
, mode
, type
))
5134 rtx rvec
[GP_ARG_NUM_REG
+ 1];
5138 enum machine_mode fmode
= mode
;
5139 unsigned long n_fpreg
= (GET_MODE_SIZE (mode
) + 7) >> 3;
5141 if (cum
->fregno
+ n_fpreg
> FP_ARG_MAX_REG
+ 1)
5143 /* Currently, we only ever need one reg here because complex
5144 doubles are split. */
5145 if (cum
->fregno
!= FP_ARG_MAX_REG
|| fmode
!= TFmode
)
5148 /* Long double split over regs and memory. */
5152 /* Do we also need to pass this arg in the parameter save
5155 && (cum
->nargs_prototype
<= 0
5156 || (DEFAULT_ABI
== ABI_AIX
5158 && align_words
>= GP_ARG_NUM_REG
)));
5160 if (!needs_psave
&& mode
== fmode
)
5161 return gen_rtx_REG (fmode
, cum
->fregno
);
5166 /* Describe the part that goes in gprs or the stack.
5167 This piece must come first, before the fprs. */
5168 if (align_words
< GP_ARG_NUM_REG
)
5170 unsigned long n_words
= rs6000_arg_size (mode
, type
);
5172 if (align_words
+ n_words
> GP_ARG_NUM_REG
5173 || (TARGET_32BIT
&& TARGET_POWERPC64
))
5175 /* If this is partially on the stack, then we only
5176 include the portion actually in registers here. */
5177 enum machine_mode rmode
= TARGET_32BIT
? SImode
: DImode
;
5180 if (align_words
+ n_words
> GP_ARG_NUM_REG
5181 && (TARGET_32BIT
&& TARGET_POWERPC64
))
5182 /* Not all of the arg fits in gprs. Say that it goes in memory too,
5183 using a magic NULL_RTX component. Also see comment in
5184 rs6000_mixed_function_arg for why the normal
5185 function_arg_partial_nregs scheme doesn't work in this case. */
5186 rvec
[k
++] = gen_rtx_EXPR_LIST (VOIDmode
, NULL_RTX
, const0_rtx
);
5189 r
= gen_rtx_REG (rmode
,
5190 GP_ARG_MIN_REG
+ align_words
);
5191 off
= GEN_INT (i
++ * GET_MODE_SIZE (rmode
));
5192 rvec
[k
++] = gen_rtx_EXPR_LIST (VOIDmode
, r
, off
);
5194 while (++align_words
< GP_ARG_NUM_REG
&& --n_words
!= 0);
5198 /* The whole arg fits in gprs. */
5199 r
= gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
5200 rvec
[k
++] = gen_rtx_EXPR_LIST (VOIDmode
, r
, const0_rtx
);
5204 /* It's entirely in memory. */
5205 rvec
[k
++] = gen_rtx_EXPR_LIST (VOIDmode
, NULL_RTX
, const0_rtx
);
5208 /* Describe where this piece goes in the fprs. */
5209 r
= gen_rtx_REG (fmode
, cum
->fregno
);
5210 rvec
[k
++] = gen_rtx_EXPR_LIST (VOIDmode
, r
, const0_rtx
);
5212 return gen_rtx_PARALLEL (mode
, gen_rtvec_v (k
, rvec
));
5214 else if (align_words
< GP_ARG_NUM_REG
)
5216 if (TARGET_32BIT
&& TARGET_POWERPC64
)
5217 return rs6000_mixed_function_arg (mode
, type
, align_words
);
5219 return gen_rtx_REG (mode
, GP_ARG_MIN_REG
+ align_words
);
/* NOTE(review): extraction-damaged text, preserved byte-for-byte below.
   Computes how many registers a partially-in-memory argument occupies:
   zero for V.4 and prototyped AltiVec args, the FPR remainder for FP
   args that overflow the FPR file, otherwise the GPR remainder when the
   arg straddles GP_ARG_NUM_REG.  Declarations of ret/align/parm_offset/
   align_words, braces, and the final `return ret' are among the missing
   lines.  TODO: restore from upstream rs6000.c.  */
5226 /* For an arg passed partly in registers and partly in memory, this is
5227 the number of registers used. For args passed entirely in registers
5228 or entirely in memory, zero. When an arg is described by a PARALLEL,
5229 perhaps using more than one register type, this function returns the
5230 number of registers used by the first element of the PARALLEL. */
5233 function_arg_partial_nregs (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
5234 tree type
, int named
)
5241 if (DEFAULT_ABI
== ABI_V4
)
5244 if (USE_ALTIVEC_FOR_ARG_P (cum
, mode
, type
, named
)
5245 && cum
->nargs_prototype
>= 0)
5248 align
= function_arg_boundary (mode
, type
) / PARM_BOUNDARY
- 1;
5249 parm_offset
= TARGET_32BIT
? 2 : 0;
5250 align_words
= cum
->words
+ ((parm_offset
- cum
->words
) & align
);
5252 if (USE_FP_FOR_ARG_P (cum
, mode
, type
)
5253 /* If we are passing this arg in gprs as well, then this function
5254 should return the number of gprs (or memory) partially passed,
5255 *not* the number of fprs. */
5257 && (cum
->nargs_prototype
<= 0
5258 || (DEFAULT_ABI
== ABI_AIX
5260 && align_words
>= GP_ARG_NUM_REG
))))
5262 if (cum
->fregno
+ ((GET_MODE_SIZE (mode
) + 7) >> 3) > FP_ARG_MAX_REG
+ 1)
5263 ret
= FP_ARG_MAX_REG
+ 1 - cum
->fregno
;
5264 else if (cum
->nargs_prototype
>= 0)
5268 if (align_words
< GP_ARG_NUM_REG
5269 && GP_ARG_NUM_REG
< align_words
+ rs6000_arg_size (mode
, type
))
5270 ret
= GP_ARG_NUM_REG
- align_words
;
5272 if (ret
!= 0 && TARGET_DEBUG_ARG
)
5273 fprintf (stderr
, "function_arg_partial_nregs: %d\n", ret
);
/* NOTE(review): extraction-damaged text, preserved byte-for-byte below.
   Pass-by-reference predicate: under V.4 aggregates (and, per the head
   comment, long double), 32-bit non-AltiVec-ABI vector modes, and
   variable-sized types are passed by reference.  The `return' statements
   and braces are among the missing lines.  TODO: restore from upstream
   rs6000.c.  */
5278 /* A C expression that indicates when an argument must be passed by
5279 reference. If nonzero for an argument, a copy of that argument is
5280 made in memory and a pointer to the argument is passed instead of
5281 the argument itself. The pointer is passed in whatever way is
5282 appropriate for passing a pointer to that type.
5284 Under V.4, aggregates and long double are passed by reference.
5286 As an extension to all 32-bit ABIs, AltiVec vectors are passed by
5287 reference unless the AltiVec vector extension ABI is in force.
5289 As an extension to all ABIs, variable sized types are passed by
5293 rs6000_pass_by_reference (CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
,
5294 enum machine_mode mode ATTRIBUTE_UNUSED
,
5295 tree type
, bool named ATTRIBUTE_UNUSED
)
5297 if ((DEFAULT_ABI
== ABI_V4
5298 && ((type
&& AGGREGATE_TYPE_P (type
))
5300 || (TARGET_32BIT
&& !TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
5301 || (type
&& int_size_in_bytes (type
) < 0))
5303 if (TARGET_DEBUG_ARG
)
5304 fprintf (stderr
, "function_arg_pass_by_reference\n");
/* NOTE(review): extraction-damaged text, preserved byte-for-byte below.
   Copies NREGS word-sized registers starting at REGNO into consecutive
   words of memory X, fixing up addresses after reload via subreg /
   replace_equiv_address when the adjusted address is not strictly
   valid.  The return type, braces, an early `if (nregs == 0) return'
   guard, and an abort path appear to be among the missing lines.
   TODO: restore from upstream rs6000.c.  */
5312 rs6000_move_block_from_reg (int regno
, rtx x
, int nregs
)
5315 enum machine_mode reg_mode
= TARGET_32BIT
? SImode
: DImode
;
5320 for (i
= 0; i
< nregs
; i
++)
5322 rtx tem
= adjust_address_nv (x
, reg_mode
, i
*GET_MODE_SIZE(reg_mode
));
5323 if (reload_completed
)
5325 if (! strict_memory_address_p (reg_mode
, XEXP (tem
, 0)))
5328 tem
= simplify_gen_subreg (reg_mode
, x
, BLKmode
,
5329 i
* GET_MODE_SIZE(reg_mode
));
5332 tem
= replace_equiv_address (tem
, XEXP (tem
, 0));
5334 if (tem
== NULL_RTX
)
5337 emit_move_insn (tem
, gen_rtx_REG (reg_mode
, regno
+ i
));
/* NOTE(review): extraction-damaged text, preserved byte-for-byte below.
   TARGET_SETUP_INCOMING_VARARGS: spills remaining incoming GPRs into
   the varargs save area (virtual_stack_vars - RS6000_VARARGS_SIZE under
   V.4, virtual_incoming_args otherwise) and, for V.4 hard-float,
   conditionally stores the remaining FPRs behind a CR1-guarded branch.
   Missing lines include braces, the `next_cum = *cum' copy, the
   emit_label, and parts of the conditional-jump construction.
   TODO: restore from upstream rs6000.c.  */
5342 /* Perform any needed actions needed for a function that is receiving a
5343 variable number of arguments.
5347 MODE and TYPE are the mode and type of the current parameter.
5349 PRETEND_SIZE is a variable that should be set to the amount of stack
5350 that must be pushed by the prolog to pretend that our caller pushed
5353 Normally, this macro will push all remaining incoming registers on the
5354 stack and set PRETEND_SIZE to the length of the registers pushed. */
5357 setup_incoming_varargs (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
5358 tree type
, int *pretend_size ATTRIBUTE_UNUSED
, int no_rtl
)
5360 CUMULATIVE_ARGS next_cum
;
5361 int reg_size
= TARGET_32BIT
? 4 : 8;
5362 rtx save_area
= NULL_RTX
, mem
;
5363 int first_reg_offset
, set
;
5365 /* Skip the last named argument. */
5367 function_arg_advance (&next_cum
, mode
, type
, 1);
5369 if (DEFAULT_ABI
== ABI_V4
)
5372 save_area
= plus_constant (virtual_stack_vars_rtx
,
5373 - RS6000_VARARGS_SIZE
);
5375 first_reg_offset
= next_cum
.sysv_gregno
- GP_ARG_MIN_REG
;
5379 first_reg_offset
= next_cum
.words
;
5380 save_area
= virtual_incoming_args_rtx
;
5382 if (targetm
.calls
.must_pass_in_stack (mode
, type
))
5383 first_reg_offset
+= rs6000_arg_size (TYPE_MODE (type
), type
);
5386 set
= get_varargs_alias_set ();
5387 if (! no_rtl
&& first_reg_offset
< GP_ARG_NUM_REG
)
5389 mem
= gen_rtx_MEM (BLKmode
,
5390 plus_constant (save_area
,
5391 first_reg_offset
* reg_size
)),
5392 set_mem_alias_set (mem
, set
);
5393 set_mem_align (mem
, BITS_PER_WORD
);
5395 rs6000_move_block_from_reg (GP_ARG_MIN_REG
+ first_reg_offset
, mem
,
5396 GP_ARG_NUM_REG
- first_reg_offset
);
5399 /* Save FP registers if needed. */
5400 if (DEFAULT_ABI
== ABI_V4
5401 && TARGET_HARD_FLOAT
&& TARGET_FPRS
5403 && next_cum
.fregno
<= FP_ARG_V4_MAX_REG
)
5405 int fregno
= next_cum
.fregno
;
5406 rtx cr1
= gen_rtx_REG (CCmode
, CR1_REGNO
);
5407 rtx lab
= gen_label_rtx ();
5408 int off
= (GP_ARG_NUM_REG
* reg_size
) + ((fregno
- FP_ARG_MIN_REG
) * 8);
5410 emit_jump_insn (gen_rtx_SET (VOIDmode
,
5412 gen_rtx_IF_THEN_ELSE (VOIDmode
,
5413 gen_rtx_NE (VOIDmode
, cr1
,
5415 gen_rtx_LABEL_REF (VOIDmode
, lab
),
5418 while (fregno
<= FP_ARG_V4_MAX_REG
)
5420 mem
= gen_rtx_MEM (DFmode
, plus_constant (save_area
, off
));
5421 set_mem_alias_set (mem
, set
);
5422 emit_move_insn (mem
, gen_rtx_REG (DFmode
, fregno
));
/* NOTE(review): extraction-damaged text, preserved byte-for-byte below.
   Builds the V.4 va_list type: a one-element array of a record with
   fields gpr, fpr (unsigned char), reserved (unsigned short padding),
   overflow_arg_area and reg_save_area; AIX-style ABIs use plain
   `char *'.  The pointer types of the f_ovf/f_sav field declarations
   are among the missing lines.  TODO: restore from upstream
   rs6000.c.  */
5431 /* Create the va_list data type. */
5434 rs6000_build_builtin_va_list (void)
5436 tree f_gpr
, f_fpr
, f_res
, f_ovf
, f_sav
, record
, type_decl
;
5438 /* For AIX, prefer 'char *' because that's what the system
5439 header files like. */
5440 if (DEFAULT_ABI
!= ABI_V4
)
5441 return build_pointer_type (char_type_node
);
5443 record
= (*lang_hooks
.types
.make_type
) (RECORD_TYPE
);
5444 type_decl
= build_decl (TYPE_DECL
, get_identifier ("__va_list_tag"), record
);
5446 f_gpr
= build_decl (FIELD_DECL
, get_identifier ("gpr"),
5447 unsigned_char_type_node
);
5448 f_fpr
= build_decl (FIELD_DECL
, get_identifier ("fpr"),
5449 unsigned_char_type_node
);
5450 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
5452 f_res
= build_decl (FIELD_DECL
, get_identifier ("reserved"),
5453 short_unsigned_type_node
);
5454 f_ovf
= build_decl (FIELD_DECL
, get_identifier ("overflow_arg_area"),
5456 f_sav
= build_decl (FIELD_DECL
, get_identifier ("reg_save_area"),
5459 DECL_FIELD_CONTEXT (f_gpr
) = record
;
5460 DECL_FIELD_CONTEXT (f_fpr
) = record
;
5461 DECL_FIELD_CONTEXT (f_res
) = record
;
5462 DECL_FIELD_CONTEXT (f_ovf
) = record
;
5463 DECL_FIELD_CONTEXT (f_sav
) = record
;
5465 TREE_CHAIN (record
) = type_decl
;
5466 TYPE_NAME (record
) = type_decl
;
5467 TYPE_FIELDS (record
) = f_gpr
;
5468 TREE_CHAIN (f_gpr
) = f_fpr
;
5469 TREE_CHAIN (f_fpr
) = f_res
;
5470 TREE_CHAIN (f_res
) = f_ovf
;
5471 TREE_CHAIN (f_ovf
) = f_sav
;
5473 layout_type (record
);
5475 /* The correct type is an array type of one element. */
5476 return build_array_type (record
, build_index_type (size_zero_node
));
/* NOTE(review): extraction-damaged text, preserved byte-for-byte below.
   va_start expander: non-V.4 ABIs defer to the generic expander; for
   V.4 it fills in the va_list record fields (gpr/fpr counters from
   current_function_args_info, overflow area at incoming-args + words,
   register save area at stack-vars - RS6000_VARARGS_SIZE), expanding
   each MODIFY_EXPR.  Braces and an FPR-saturation clamp appear to be
   among the missing lines.  TODO: restore from upstream rs6000.c.  */
5479 /* Implement va_start. */
5482 rs6000_va_start (tree valist
, rtx nextarg
)
5484 HOST_WIDE_INT words
, n_gpr
, n_fpr
;
5485 tree f_gpr
, f_fpr
, f_res
, f_ovf
, f_sav
;
5486 tree gpr
, fpr
, ovf
, sav
, t
;
5488 /* Only SVR4 needs something special. */
5489 if (DEFAULT_ABI
!= ABI_V4
)
5491 std_expand_builtin_va_start (valist
, nextarg
);
5495 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
5496 f_fpr
= TREE_CHAIN (f_gpr
);
5497 f_res
= TREE_CHAIN (f_fpr
);
5498 f_ovf
= TREE_CHAIN (f_res
);
5499 f_sav
= TREE_CHAIN (f_ovf
);
5501 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
5502 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
, NULL_TREE
);
5503 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
, NULL_TREE
);
5504 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
, NULL_TREE
);
5505 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
, NULL_TREE
);
5507 /* Count number of gp and fp argument registers used. */
5508 words
= current_function_args_info
.words
;
5509 n_gpr
= current_function_args_info
.sysv_gregno
- GP_ARG_MIN_REG
;
5510 n_fpr
= current_function_args_info
.fregno
- FP_ARG_MIN_REG
;
5512 if (TARGET_DEBUG_ARG
)
5513 fprintf (stderr
, "va_start: words = "HOST_WIDE_INT_PRINT_DEC
", n_gpr = "
5514 HOST_WIDE_INT_PRINT_DEC
", n_fpr = "HOST_WIDE_INT_PRINT_DEC
"\n",
5515 words
, n_gpr
, n_fpr
);
5517 t
= build (MODIFY_EXPR
, TREE_TYPE (gpr
), gpr
,
5518 build_int_cst (NULL_TREE
, n_gpr
));
5519 TREE_SIDE_EFFECTS (t
) = 1;
5520 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5522 t
= build (MODIFY_EXPR
, TREE_TYPE (fpr
), fpr
,
5523 build_int_cst (NULL_TREE
, n_fpr
));
5524 TREE_SIDE_EFFECTS (t
) = 1;
5525 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5527 /* Find the overflow area. */
5528 t
= make_tree (TREE_TYPE (ovf
), virtual_incoming_args_rtx
);
5530 t
= build (PLUS_EXPR
, TREE_TYPE (ovf
), t
,
5531 build_int_cst (NULL_TREE
, words
* UNITS_PER_WORD
));
5532 t
= build (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
5533 TREE_SIDE_EFFECTS (t
) = 1;
5534 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5536 /* Find the register save area. */
5537 t
= make_tree (TREE_TYPE (sav
), virtual_stack_vars_rtx
);
5538 t
= build (PLUS_EXPR
, TREE_TYPE (sav
), t
,
5539 build_int_cst (NULL_TREE
, -RS6000_VARARGS_SIZE
));
5540 t
= build (MODIFY_EXPR
, TREE_TYPE (sav
), sav
, t
);
5541 TREE_SIDE_EFFECTS (t
) = 1;
5542 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
/* NOTE(review): extraction-damaged text, preserved byte-for-byte below.
   Gimplified va_arg for V.4: by-reference types recurse through a
   pointer; non-V.4 ABIs use the generic expander (splitting small
   complex types into two scalar fetches); otherwise the value is pulled
   from the FPR/GPR save area when registers remain (guarded by
   lab_false/lab_over control flow) or from the aligned overflow area.
   Missing lines include braces, the n_reg/sav_ofs/sav_scale
   assignments, the reg = gpr/fpr selection, and the overflow-area `t =
   ovf' setup.  TODO: restore from upstream rs6000.c.  */
5545 /* Implement va_arg. */
5548 rs6000_gimplify_va_arg (tree valist
, tree type
, tree
*pre_p
, tree
*post_p
)
5550 tree f_gpr
, f_fpr
, f_res
, f_ovf
, f_sav
;
5551 tree gpr
, fpr
, ovf
, sav
, reg
, t
, u
;
5552 int size
, rsize
, n_reg
, sav_ofs
, sav_scale
;
5553 tree lab_false
, lab_over
, addr
;
5555 tree ptrtype
= build_pointer_type (type
);
5557 if (pass_by_reference (NULL
, TYPE_MODE (type
), type
, false))
5559 t
= rs6000_gimplify_va_arg (valist
, ptrtype
, pre_p
, post_p
);
5560 return build_fold_indirect_ref (t
);
5563 if (DEFAULT_ABI
!= ABI_V4
)
5565 if (targetm
.calls
.split_complex_arg
&& TREE_CODE (type
) == COMPLEX_TYPE
)
5567 tree elem_type
= TREE_TYPE (type
);
5568 enum machine_mode elem_mode
= TYPE_MODE (elem_type
);
5569 int elem_size
= GET_MODE_SIZE (elem_mode
);
5571 if (elem_size
< UNITS_PER_WORD
)
5573 tree real_part
, imag_part
;
5574 tree post
= NULL_TREE
;
5576 real_part
= rs6000_gimplify_va_arg (valist
, elem_type
, pre_p
,
5578 /* Copy the value into a temporary, lest the formal temporary
5579 be reused out from under us. */
5580 real_part
= get_initialized_tmp_var (real_part
, pre_p
, &post
);
5581 append_to_statement_list (post
, pre_p
);
5583 imag_part
= rs6000_gimplify_va_arg (valist
, elem_type
, pre_p
,
5586 return build (COMPLEX_EXPR
, type
, real_part
, imag_part
);
5590 return std_gimplify_va_arg_expr (valist
, type
, pre_p
, post_p
);
5593 f_gpr
= TYPE_FIELDS (TREE_TYPE (va_list_type_node
));
5594 f_fpr
= TREE_CHAIN (f_gpr
);
5595 f_res
= TREE_CHAIN (f_fpr
);
5596 f_ovf
= TREE_CHAIN (f_res
);
5597 f_sav
= TREE_CHAIN (f_ovf
);
5599 valist
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)), valist
);
5600 gpr
= build (COMPONENT_REF
, TREE_TYPE (f_gpr
), valist
, f_gpr
, NULL_TREE
);
5601 fpr
= build (COMPONENT_REF
, TREE_TYPE (f_fpr
), valist
, f_fpr
, NULL_TREE
);
5602 ovf
= build (COMPONENT_REF
, TREE_TYPE (f_ovf
), valist
, f_ovf
, NULL_TREE
);
5603 sav
= build (COMPONENT_REF
, TREE_TYPE (f_sav
), valist
, f_sav
, NULL_TREE
);
5605 size
= int_size_in_bytes (type
);
5606 rsize
= (size
+ 3) / 4;
5609 if (TARGET_HARD_FLOAT
&& TARGET_FPRS
5610 && (TYPE_MODE (type
) == SFmode
|| TYPE_MODE (type
) == DFmode
))
5612 /* FP args go in FP registers, if present. */
5617 if (TYPE_MODE (type
) == DFmode
)
5622 /* Otherwise into GP registers. */
5631 /* Pull the value out of the saved registers.... */
5634 addr
= create_tmp_var (ptr_type_node
, "addr");
5635 DECL_POINTER_ALIAS_SET (addr
) = get_varargs_alias_set ();
5637 /* AltiVec vectors never go in registers when -mabi=altivec. */
5638 if (TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (TYPE_MODE (type
)))
5642 lab_false
= create_artificial_label ();
5643 lab_over
= create_artificial_label ();
5645 /* Long long and SPE vectors are aligned in the registers.
5646 As are any other 2 gpr item such as complex int due to a
5647 historical mistake. */
5651 u
= build2 (BIT_AND_EXPR
, TREE_TYPE (reg
), reg
,
5652 size_int (n_reg
- 1));
5653 u
= build2 (POSTINCREMENT_EXPR
, TREE_TYPE (reg
), reg
, u
);
5656 t
= fold_convert (TREE_TYPE (reg
), size_int (8 - n_reg
+ 1));
5657 t
= build2 (GE_EXPR
, boolean_type_node
, u
, t
);
5658 u
= build1 (GOTO_EXPR
, void_type_node
, lab_false
);
5659 t
= build3 (COND_EXPR
, void_type_node
, t
, u
, NULL_TREE
);
5660 gimplify_and_add (t
, pre_p
);
5664 t
= build2 (PLUS_EXPR
, ptr_type_node
, sav
, size_int (sav_ofs
));
5666 u
= build2 (POSTINCREMENT_EXPR
, TREE_TYPE (reg
), reg
, size_int (n_reg
));
5667 u
= build1 (CONVERT_EXPR
, integer_type_node
, u
);
5668 u
= build2 (MULT_EXPR
, integer_type_node
, u
, size_int (sav_scale
));
5669 t
= build2 (PLUS_EXPR
, ptr_type_node
, t
, u
);
5671 t
= build2 (MODIFY_EXPR
, void_type_node
, addr
, t
);
5672 gimplify_and_add (t
, pre_p
);
5674 t
= build1 (GOTO_EXPR
, void_type_node
, lab_over
);
5675 gimplify_and_add (t
, pre_p
);
5677 t
= build1 (LABEL_EXPR
, void_type_node
, lab_false
);
5678 append_to_statement_list (t
, pre_p
);
5682 /* Ensure that we don't find any more args in regs.
5683 Alignment has taken care of the n_reg == 2 case. */
5684 t
= build (MODIFY_EXPR
, TREE_TYPE (reg
), reg
, size_int (8));
5685 gimplify_and_add (t
, pre_p
);
5689 /* ... otherwise out of the overflow area. */
5691 /* Care for on-stack alignment if needed. */
5695 t
= build2 (PLUS_EXPR
, TREE_TYPE (t
), t
, size_int (align
- 1));
5696 t
= build2 (BIT_AND_EXPR
, TREE_TYPE (t
), t
,
5697 build_int_cst (NULL_TREE
, -align
));
5699 gimplify_expr (&t
, pre_p
, NULL
, is_gimple_val
, fb_rvalue
);
5701 u
= build2 (MODIFY_EXPR
, void_type_node
, addr
, t
);
5702 gimplify_and_add (u
, pre_p
);
5704 t
= build2 (PLUS_EXPR
, TREE_TYPE (t
), t
, size_int (size
));
5705 t
= build2 (MODIFY_EXPR
, TREE_TYPE (ovf
), ovf
, t
);
5706 gimplify_and_add (t
, pre_p
);
5710 t
= build1 (LABEL_EXPR
, void_type_node
, lab_over
);
5711 append_to_statement_list (t
, pre_p
);
5714 addr
= fold_convert (ptrtype
, addr
);
5715 return build_fold_indirect_ref (addr
);
/* Register builtin NAME with the front end: TYPE is its function type,
   CODE its rs6000_builtins enumerator, and the builtin is created only
   when some bit of MASK is set in target_flags (so e.g. AltiVec
   builtins only exist under -maltivec).  Wrapped in do { } while (0)
   so the macro expands to a single statement and is safe inside an
   unbraced if/else.
   NOTE(review): the trailing NULL/NULL_TREE arguments (library name and
   attributes) were reconstructed from the lang hook signature — confirm
   against langhooks.h.  */
#define def_builtin(MASK, NAME, TYPE, CODE)				\
  do {									\
    if ((MASK) & target_flags)						\
      lang_hooks.builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,	\
				   NULL, NULL_TREE);			\
  } while (0)
5727 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
5729 static const struct builtin_description bdesc_3arg
[] =
5731 { MASK_ALTIVEC
, CODE_FOR_altivec_vmaddfp
, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP
},
5732 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhaddshs
, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS
},
5733 { MASK_ALTIVEC
, CODE_FOR_altivec_vmhraddshs
, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS
},
5734 { MASK_ALTIVEC
, CODE_FOR_altivec_vmladduhm
, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM
},
5735 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumubm
, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM
},
5736 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsummbm
, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM
},
5737 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhm
, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM
},
5738 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshm
, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM
},
5739 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumuhs
, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS
},
5740 { MASK_ALTIVEC
, CODE_FOR_altivec_vmsumshs
, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS
},
5741 { MASK_ALTIVEC
, CODE_FOR_altivec_vnmsubfp
, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP
},
5742 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4sf
, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF
},
5743 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_4si
, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI
},
5744 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_8hi
, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI
},
5745 { MASK_ALTIVEC
, CODE_FOR_altivec_vperm_16qi
, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI
},
5746 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4sf
, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF
},
5747 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_4si
, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI
},
5748 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_8hi
, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI
},
5749 { MASK_ALTIVEC
, CODE_FOR_altivec_vsel_16qi
, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI
},
5750 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_16qi
, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI
},
5751 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_8hi
, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI
},
5752 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4si
, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI
},
5753 { MASK_ALTIVEC
, CODE_FOR_altivec_vsldoi_4sf
, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF
},
5756 /* DST operations: void foo (void *, const int, const char). */
5758 static const struct builtin_description bdesc_dst
[] =
5760 { MASK_ALTIVEC
, CODE_FOR_altivec_dst
, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST
},
5761 { MASK_ALTIVEC
, CODE_FOR_altivec_dstt
, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT
},
5762 { MASK_ALTIVEC
, CODE_FOR_altivec_dstst
, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST
},
5763 { MASK_ALTIVEC
, CODE_FOR_altivec_dststt
, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT
}
5766 /* Simple binary operations: VECc = foo (VECa, VECb). */
5768 static struct builtin_description bdesc_2arg
[] =
5770 { MASK_ALTIVEC
, CODE_FOR_addv16qi3
, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM
},
5771 { MASK_ALTIVEC
, CODE_FOR_addv8hi3
, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM
},
5772 { MASK_ALTIVEC
, CODE_FOR_addv4si3
, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM
},
5773 { MASK_ALTIVEC
, CODE_FOR_addv4sf3
, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP
},
5774 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddcuw
, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW
},
5775 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddubs
, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS
},
5776 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsbs
, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS
},
5777 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduhs
, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS
},
5778 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddshs
, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS
},
5779 { MASK_ALTIVEC
, CODE_FOR_altivec_vadduws
, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS
},
5780 { MASK_ALTIVEC
, CODE_FOR_altivec_vaddsws
, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS
},
5781 { MASK_ALTIVEC
, CODE_FOR_andv4si3
, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND
},
5782 { MASK_ALTIVEC
, CODE_FOR_altivec_vandc
, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC
},
5783 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgub
, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB
},
5784 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsb
, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB
},
5785 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguh
, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH
},
5786 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsh
, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH
},
5787 { MASK_ALTIVEC
, CODE_FOR_altivec_vavguw
, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW
},
5788 { MASK_ALTIVEC
, CODE_FOR_altivec_vavgsw
, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW
},
5789 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfux
, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX
},
5790 { MASK_ALTIVEC
, CODE_FOR_altivec_vcfsx
, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX
},
5791 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpbfp
, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP
},
5792 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequb
, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB
},
5793 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequh
, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH
},
5794 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpequw
, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW
},
5795 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpeqfp
, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP
},
5796 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgefp
, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP
},
5797 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtub
, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB
},
5798 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsb
, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB
},
5799 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuh
, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH
},
5800 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsh
, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH
},
5801 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtuw
, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW
},
5802 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtsw
, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW
},
5803 { MASK_ALTIVEC
, CODE_FOR_altivec_vcmpgtfp
, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP
},
5804 { MASK_ALTIVEC
, CODE_FOR_altivec_vctsxs
, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS
},
5805 { MASK_ALTIVEC
, CODE_FOR_altivec_vctuxs
, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS
},
5806 { MASK_ALTIVEC
, CODE_FOR_umaxv16qi3
, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB
},
5807 { MASK_ALTIVEC
, CODE_FOR_smaxv16qi3
, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB
},
5808 { MASK_ALTIVEC
, CODE_FOR_umaxv8hi3
, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH
},
5809 { MASK_ALTIVEC
, CODE_FOR_smaxv8hi3
, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH
},
5810 { MASK_ALTIVEC
, CODE_FOR_umaxv4si3
, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW
},
5811 { MASK_ALTIVEC
, CODE_FOR_smaxv4si3
, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW
},
5812 { MASK_ALTIVEC
, CODE_FOR_smaxv4sf3
, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP
},
5813 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghb
, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB
},
5814 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghh
, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH
},
5815 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrghw
, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW
},
5816 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglb
, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB
},
5817 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglh
, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH
},
5818 { MASK_ALTIVEC
, CODE_FOR_altivec_vmrglw
, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW
},
5819 { MASK_ALTIVEC
, CODE_FOR_uminv16qi3
, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB
},
5820 { MASK_ALTIVEC
, CODE_FOR_sminv16qi3
, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB
},
5821 { MASK_ALTIVEC
, CODE_FOR_uminv8hi3
, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH
},
5822 { MASK_ALTIVEC
, CODE_FOR_sminv8hi3
, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH
},
5823 { MASK_ALTIVEC
, CODE_FOR_uminv4si3
, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW
},
5824 { MASK_ALTIVEC
, CODE_FOR_sminv4si3
, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW
},
5825 { MASK_ALTIVEC
, CODE_FOR_sminv4sf3
, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP
},
5826 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleub
, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB
},
5827 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesb
, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB
},
5828 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuleuh
, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH
},
5829 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulesh
, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH
},
5830 { MASK_ALTIVEC
, CODE_FOR_altivec_vmuloub
, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB
},
5831 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosb
, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB
},
5832 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulouh
, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH
},
5833 { MASK_ALTIVEC
, CODE_FOR_altivec_vmulosh
, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH
},
5834 { MASK_ALTIVEC
, CODE_FOR_altivec_vnor
, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR
},
5835 { MASK_ALTIVEC
, CODE_FOR_iorv4si3
, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR
},
5836 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhum
, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM
},
5837 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwum
, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM
},
5838 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkpx
, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX
},
5839 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhss
, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS
},
5840 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshss
, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS
},
5841 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwss
, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS
},
5842 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswss
, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS
},
5843 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuhus
, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS
},
5844 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkshus
, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS
},
5845 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkuwus
, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS
},
5846 { MASK_ALTIVEC
, CODE_FOR_altivec_vpkswus
, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS
},
5847 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlb
, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB
},
5848 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlh
, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH
},
5849 { MASK_ALTIVEC
, CODE_FOR_altivec_vrlw
, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW
},
5850 { MASK_ALTIVEC
, CODE_FOR_altivec_vslb
, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB
},
5851 { MASK_ALTIVEC
, CODE_FOR_altivec_vslh
, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH
},
5852 { MASK_ALTIVEC
, CODE_FOR_altivec_vslw
, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW
},
5853 { MASK_ALTIVEC
, CODE_FOR_altivec_vsl
, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL
},
5854 { MASK_ALTIVEC
, CODE_FOR_altivec_vslo
, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO
},
5855 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltb
, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB
},
5856 { MASK_ALTIVEC
, CODE_FOR_altivec_vsplth
, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH
},
5857 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltw
, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW
},
5858 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrb
, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB
},
5859 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrh
, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH
},
5860 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrw
, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW
},
5861 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrab
, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB
},
5862 { MASK_ALTIVEC
, CODE_FOR_altivec_vsrah
, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH
},
5863 { MASK_ALTIVEC
, CODE_FOR_altivec_vsraw
, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW
},
5864 { MASK_ALTIVEC
, CODE_FOR_altivec_vsr
, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR
},
5865 { MASK_ALTIVEC
, CODE_FOR_altivec_vsro
, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO
},
5866 { MASK_ALTIVEC
, CODE_FOR_subv16qi3
, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM
},
5867 { MASK_ALTIVEC
, CODE_FOR_subv8hi3
, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM
},
5868 { MASK_ALTIVEC
, CODE_FOR_subv4si3
, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM
},
5869 { MASK_ALTIVEC
, CODE_FOR_subv4sf3
, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP
},
5870 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubcuw
, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW
},
5871 { MASK_ALTIVEC
, CODE_FOR_altivec_vsububs
, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS
},
5872 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsbs
, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS
},
5873 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuhs
, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS
},
5874 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubshs
, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS
},
5875 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubuws
, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS
},
5876 { MASK_ALTIVEC
, CODE_FOR_altivec_vsubsws
, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS
},
5877 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4ubs
, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS
},
5878 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4sbs
, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS
},
5879 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum4shs
, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS
},
5880 { MASK_ALTIVEC
, CODE_FOR_altivec_vsum2sws
, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS
},
5881 { MASK_ALTIVEC
, CODE_FOR_altivec_vsumsws
, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS
},
5882 { MASK_ALTIVEC
, CODE_FOR_xorv4si3
, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR
},
5884 /* Place holder, leave as first spe builtin. */
5885 { 0, CODE_FOR_spe_evaddw
, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW
},
5886 { 0, CODE_FOR_spe_evand
, "__builtin_spe_evand", SPE_BUILTIN_EVAND
},
5887 { 0, CODE_FOR_spe_evandc
, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC
},
5888 { 0, CODE_FOR_spe_evdivws
, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS
},
5889 { 0, CODE_FOR_spe_evdivwu
, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU
},
5890 { 0, CODE_FOR_spe_eveqv
, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV
},
5891 { 0, CODE_FOR_spe_evfsadd
, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD
},
5892 { 0, CODE_FOR_spe_evfsdiv
, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV
},
5893 { 0, CODE_FOR_spe_evfsmul
, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL
},
5894 { 0, CODE_FOR_spe_evfssub
, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB
},
5895 { 0, CODE_FOR_spe_evmergehi
, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI
},
5896 { 0, CODE_FOR_spe_evmergehilo
, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO
},
5897 { 0, CODE_FOR_spe_evmergelo
, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO
},
5898 { 0, CODE_FOR_spe_evmergelohi
, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI
},
5899 { 0, CODE_FOR_spe_evmhegsmfaa
, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA
},
5900 { 0, CODE_FOR_spe_evmhegsmfan
, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN
},
5901 { 0, CODE_FOR_spe_evmhegsmiaa
, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA
},
5902 { 0, CODE_FOR_spe_evmhegsmian
, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN
},
5903 { 0, CODE_FOR_spe_evmhegumiaa
, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA
},
5904 { 0, CODE_FOR_spe_evmhegumian
, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN
},
5905 { 0, CODE_FOR_spe_evmhesmf
, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF
},
5906 { 0, CODE_FOR_spe_evmhesmfa
, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA
},
5907 { 0, CODE_FOR_spe_evmhesmfaaw
, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW
},
5908 { 0, CODE_FOR_spe_evmhesmfanw
, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW
},
5909 { 0, CODE_FOR_spe_evmhesmi
, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI
},
5910 { 0, CODE_FOR_spe_evmhesmia
, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA
},
5911 { 0, CODE_FOR_spe_evmhesmiaaw
, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW
},
5912 { 0, CODE_FOR_spe_evmhesmianw
, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW
},
5913 { 0, CODE_FOR_spe_evmhessf
, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF
},
5914 { 0, CODE_FOR_spe_evmhessfa
, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA
},
5915 { 0, CODE_FOR_spe_evmhessfaaw
, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW
},
5916 { 0, CODE_FOR_spe_evmhessfanw
, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW
},
5917 { 0, CODE_FOR_spe_evmhessiaaw
, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW
},
5918 { 0, CODE_FOR_spe_evmhessianw
, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW
},
5919 { 0, CODE_FOR_spe_evmheumi
, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI
},
5920 { 0, CODE_FOR_spe_evmheumia
, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA
},
5921 { 0, CODE_FOR_spe_evmheumiaaw
, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW
},
5922 { 0, CODE_FOR_spe_evmheumianw
, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW
},
5923 { 0, CODE_FOR_spe_evmheusiaaw
, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW
},
5924 { 0, CODE_FOR_spe_evmheusianw
, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW
},
5925 { 0, CODE_FOR_spe_evmhogsmfaa
, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA
},
5926 { 0, CODE_FOR_spe_evmhogsmfan
, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN
},
5927 { 0, CODE_FOR_spe_evmhogsmiaa
, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA
},
5928 { 0, CODE_FOR_spe_evmhogsmian
, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN
},
5929 { 0, CODE_FOR_spe_evmhogumiaa
, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA
},
5930 { 0, CODE_FOR_spe_evmhogumian
, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN
},
5931 { 0, CODE_FOR_spe_evmhosmf
, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF
},
5932 { 0, CODE_FOR_spe_evmhosmfa
, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA
},
5933 { 0, CODE_FOR_spe_evmhosmfaaw
, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW
},
5934 { 0, CODE_FOR_spe_evmhosmfanw
, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW
},
5935 { 0, CODE_FOR_spe_evmhosmi
, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI
},
5936 { 0, CODE_FOR_spe_evmhosmia
, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA
},
5937 { 0, CODE_FOR_spe_evmhosmiaaw
, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW
},
5938 { 0, CODE_FOR_spe_evmhosmianw
, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW
},
5939 { 0, CODE_FOR_spe_evmhossf
, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF
},
5940 { 0, CODE_FOR_spe_evmhossfa
, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA
},
5941 { 0, CODE_FOR_spe_evmhossfaaw
, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW
},
5942 { 0, CODE_FOR_spe_evmhossfanw
, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW
},
5943 { 0, CODE_FOR_spe_evmhossiaaw
, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW
},
5944 { 0, CODE_FOR_spe_evmhossianw
, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW
},
5945 { 0, CODE_FOR_spe_evmhoumi
, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI
},
5946 { 0, CODE_FOR_spe_evmhoumia
, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA
},
5947 { 0, CODE_FOR_spe_evmhoumiaaw
, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW
},
5948 { 0, CODE_FOR_spe_evmhoumianw
, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW
},
5949 { 0, CODE_FOR_spe_evmhousiaaw
, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW
},
5950 { 0, CODE_FOR_spe_evmhousianw
, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW
},
5951 { 0, CODE_FOR_spe_evmwhsmf
, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF
},
5952 { 0, CODE_FOR_spe_evmwhsmfa
, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA
},
5953 { 0, CODE_FOR_spe_evmwhsmi
, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI
},
5954 { 0, CODE_FOR_spe_evmwhsmia
, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA
},
5955 { 0, CODE_FOR_spe_evmwhssf
, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF
},
5956 { 0, CODE_FOR_spe_evmwhssfa
, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA
},
5957 { 0, CODE_FOR_spe_evmwhumi
, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI
},
5958 { 0, CODE_FOR_spe_evmwhumia
, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA
},
5959 { 0, CODE_FOR_spe_evmwlsmiaaw
, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW
},
5960 { 0, CODE_FOR_spe_evmwlsmianw
, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW
},
5961 { 0, CODE_FOR_spe_evmwlssiaaw
, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW
},
5962 { 0, CODE_FOR_spe_evmwlssianw
, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW
},
5963 { 0, CODE_FOR_spe_evmwlumi
, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI
},
5964 { 0, CODE_FOR_spe_evmwlumia
, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA
},
5965 { 0, CODE_FOR_spe_evmwlumiaaw
, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW
},
5966 { 0, CODE_FOR_spe_evmwlumianw
, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW
},
5967 { 0, CODE_FOR_spe_evmwlusiaaw
, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW
},
5968 { 0, CODE_FOR_spe_evmwlusianw
, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW
},
5969 { 0, CODE_FOR_spe_evmwsmf
, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF
},
5970 { 0, CODE_FOR_spe_evmwsmfa
, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA
},
5971 { 0, CODE_FOR_spe_evmwsmfaa
, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA
},
5972 { 0, CODE_FOR_spe_evmwsmfan
, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN
},
5973 { 0, CODE_FOR_spe_evmwsmi
, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI
},
5974 { 0, CODE_FOR_spe_evmwsmia
, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA
},
5975 { 0, CODE_FOR_spe_evmwsmiaa
, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA
},
5976 { 0, CODE_FOR_spe_evmwsmian
, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN
},
5977 { 0, CODE_FOR_spe_evmwssf
, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF
},
5978 { 0, CODE_FOR_spe_evmwssfa
, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA
},
5979 { 0, CODE_FOR_spe_evmwssfaa
, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA
},
5980 { 0, CODE_FOR_spe_evmwssfan
, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN
},
5981 { 0, CODE_FOR_spe_evmwumi
, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI
},
5982 { 0, CODE_FOR_spe_evmwumia
, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA
},
5983 { 0, CODE_FOR_spe_evmwumiaa
, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA
},
5984 { 0, CODE_FOR_spe_evmwumian
, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN
},
5985 { 0, CODE_FOR_spe_evnand
, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND
},
5986 { 0, CODE_FOR_spe_evnor
, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR
},
5987 { 0, CODE_FOR_spe_evor
, "__builtin_spe_evor", SPE_BUILTIN_EVOR
},
5988 { 0, CODE_FOR_spe_evorc
, "__builtin_spe_evorc", SPE_BUILTIN_EVORC
},
5989 { 0, CODE_FOR_spe_evrlw
, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW
},
5990 { 0, CODE_FOR_spe_evslw
, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW
},
5991 { 0, CODE_FOR_spe_evsrws
, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS
},
5992 { 0, CODE_FOR_spe_evsrwu
, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU
},
5993 { 0, CODE_FOR_spe_evsubfw
, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW
},
5995 /* SPE binary operations expecting a 5-bit unsigned literal. */
5996 { 0, CODE_FOR_spe_evaddiw
, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW
},
5998 { 0, CODE_FOR_spe_evrlwi
, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI
},
5999 { 0, CODE_FOR_spe_evslwi
, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI
},
6000 { 0, CODE_FOR_spe_evsrwis
, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS
},
6001 { 0, CODE_FOR_spe_evsrwiu
, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU
},
6002 { 0, CODE_FOR_spe_evsubifw
, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW
},
6003 { 0, CODE_FOR_spe_evmwhssfaa
, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA
},
6004 { 0, CODE_FOR_spe_evmwhssmaa
, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA
},
6005 { 0, CODE_FOR_spe_evmwhsmfaa
, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA
},
6006 { 0, CODE_FOR_spe_evmwhsmiaa
, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA
},
6007 { 0, CODE_FOR_spe_evmwhusiaa
, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA
},
6008 { 0, CODE_FOR_spe_evmwhumiaa
, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA
},
6009 { 0, CODE_FOR_spe_evmwhssfan
, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN
},
6010 { 0, CODE_FOR_spe_evmwhssian
, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN
},
6011 { 0, CODE_FOR_spe_evmwhsmfan
, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN
},
6012 { 0, CODE_FOR_spe_evmwhsmian
, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN
},
6013 { 0, CODE_FOR_spe_evmwhusian
, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN
},
6014 { 0, CODE_FOR_spe_evmwhumian
, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN
},
6015 { 0, CODE_FOR_spe_evmwhgssfaa
, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA
},
6016 { 0, CODE_FOR_spe_evmwhgsmfaa
, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA
},
6017 { 0, CODE_FOR_spe_evmwhgsmiaa
, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA
},
6018 { 0, CODE_FOR_spe_evmwhgumiaa
, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA
},
6019 { 0, CODE_FOR_spe_evmwhgssfan
, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN
},
6020 { 0, CODE_FOR_spe_evmwhgsmfan
, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN
},
6021 { 0, CODE_FOR_spe_evmwhgsmian
, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN
},
6022 { 0, CODE_FOR_spe_evmwhgumian
, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN
},
6023 { 0, CODE_FOR_spe_brinc
, "__builtin_spe_brinc", SPE_BUILTIN_BRINC
},
6025 /* Place-holder. Leave as last binary SPE builtin. */
6026 { 0, CODE_FOR_xorv2si3
, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR
},
6029 /* AltiVec predicates. */
6031 struct builtin_description_predicates
6033 const unsigned int mask
;
6034 const enum insn_code icode
;
6036 const char *const name
;
6037 const enum rs6000_builtins code
;
6040 static const struct builtin_description_predicates bdesc_altivec_preds
[] =
6042 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P
},
6043 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P
},
6044 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P
},
6045 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4sf
, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P
},
6046 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P
},
6047 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P
},
6048 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v4si
, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P
},
6049 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P
},
6050 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P
},
6051 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v8hi
, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P
},
6052 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P
},
6053 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P
},
6054 { MASK_ALTIVEC
, CODE_FOR_altivec_predicate_v16qi
, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P
}
6057 /* SPE predicates. */
6058 static struct builtin_description bdesc_spe_predicates
[] =
6060 /* Place-holder. Leave as first. */
6061 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ
},
6062 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS
},
6063 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU
},
6064 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS
},
6065 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU
},
6066 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ
},
6067 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT
},
6068 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT
},
6069 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ
},
6070 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT
},
6071 /* Place-holder. Leave as last. */
6072 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT
},
6075 /* SPE evsel predicates. */
6076 static struct builtin_description bdesc_spe_evsel
[] =
6078 /* Place-holder. Leave as first. */
6079 { 0, CODE_FOR_spe_evcmpgts
, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS
},
6080 { 0, CODE_FOR_spe_evcmpgtu
, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU
},
6081 { 0, CODE_FOR_spe_evcmplts
, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS
},
6082 { 0, CODE_FOR_spe_evcmpltu
, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU
},
6083 { 0, CODE_FOR_spe_evcmpeq
, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ
},
6084 { 0, CODE_FOR_spe_evfscmpgt
, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT
},
6085 { 0, CODE_FOR_spe_evfscmplt
, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT
},
6086 { 0, CODE_FOR_spe_evfscmpeq
, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ
},
6087 { 0, CODE_FOR_spe_evfststgt
, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT
},
6088 { 0, CODE_FOR_spe_evfststlt
, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT
},
6089 /* Place-holder. Leave as last. */
6090 { 0, CODE_FOR_spe_evfststeq
, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ
},
6093 /* ABS* operations. */
6095 static const struct builtin_description bdesc_abs
[] =
6097 { MASK_ALTIVEC
, CODE_FOR_absv4si2
, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI
},
6098 { MASK_ALTIVEC
, CODE_FOR_absv8hi2
, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI
},
6099 { MASK_ALTIVEC
, CODE_FOR_absv4sf2
, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF
},
6100 { MASK_ALTIVEC
, CODE_FOR_absv16qi2
, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI
},
6101 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v4si
, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI
},
6102 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v8hi
, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI
},
6103 { MASK_ALTIVEC
, CODE_FOR_altivec_abss_v16qi
, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI
}
6106 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
6109 static struct builtin_description bdesc_1arg
[] =
6111 { MASK_ALTIVEC
, CODE_FOR_altivec_vexptefp
, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP
},
6112 { MASK_ALTIVEC
, CODE_FOR_altivec_vlogefp
, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP
},
6113 { MASK_ALTIVEC
, CODE_FOR_altivec_vrefp
, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP
},
6114 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfim
, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM
},
6115 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfin
, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN
},
6116 { MASK_ALTIVEC
, CODE_FOR_altivec_vrfip
, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP
},
6117 { MASK_ALTIVEC
, CODE_FOR_ftruncv4sf2
, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ
},
6118 { MASK_ALTIVEC
, CODE_FOR_altivec_vrsqrtefp
, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP
},
6119 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisb
, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB
},
6120 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltish
, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH
},
6121 { MASK_ALTIVEC
, CODE_FOR_altivec_vspltisw
, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW
},
6122 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsb
, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB
},
6123 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhpx
, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX
},
6124 { MASK_ALTIVEC
, CODE_FOR_altivec_vupkhsh
, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH
},
6125 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsb
, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB
},
6126 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklpx
, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX
},
6127 { MASK_ALTIVEC
, CODE_FOR_altivec_vupklsh
, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH
},
6129 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
6130 end with SPE_BUILTIN_EVSUBFUSIAAW. */
6131 { 0, CODE_FOR_spe_evabs
, "__builtin_spe_evabs", SPE_BUILTIN_EVABS
},
6132 { 0, CODE_FOR_spe_evaddsmiaaw
, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW
},
6133 { 0, CODE_FOR_spe_evaddssiaaw
, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW
},
6134 { 0, CODE_FOR_spe_evaddumiaaw
, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW
},
6135 { 0, CODE_FOR_spe_evaddusiaaw
, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW
},
6136 { 0, CODE_FOR_spe_evcntlsw
, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW
},
6137 { 0, CODE_FOR_spe_evcntlzw
, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW
},
6138 { 0, CODE_FOR_spe_evextsb
, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB
},
6139 { 0, CODE_FOR_spe_evextsh
, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH
},
6140 { 0, CODE_FOR_spe_evfsabs
, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS
},
6141 { 0, CODE_FOR_spe_evfscfsf
, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF
},
6142 { 0, CODE_FOR_spe_evfscfsi
, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI
},
6143 { 0, CODE_FOR_spe_evfscfuf
, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF
},
6144 { 0, CODE_FOR_spe_evfscfui
, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI
},
6145 { 0, CODE_FOR_spe_evfsctsf
, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF
},
6146 { 0, CODE_FOR_spe_evfsctsi
, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI
},
6147 { 0, CODE_FOR_spe_evfsctsiz
, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ
},
6148 { 0, CODE_FOR_spe_evfsctuf
, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF
},
6149 { 0, CODE_FOR_spe_evfsctui
, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI
},
6150 { 0, CODE_FOR_spe_evfsctuiz
, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ
},
6151 { 0, CODE_FOR_spe_evfsnabs
, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS
},
6152 { 0, CODE_FOR_spe_evfsneg
, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG
},
6153 { 0, CODE_FOR_spe_evmra
, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA
},
6154 { 0, CODE_FOR_negv2si2
, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG
},
6155 { 0, CODE_FOR_spe_evrndw
, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW
},
6156 { 0, CODE_FOR_spe_evsubfsmiaaw
, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW
},
6157 { 0, CODE_FOR_spe_evsubfssiaaw
, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW
},
6158 { 0, CODE_FOR_spe_evsubfumiaaw
, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW
},
6160 /* Place-holder. Leave as last unary SPE builtin. */
6161 { 0, CODE_FOR_spe_evsubfusiaaw
, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW
},
6165 rs6000_expand_unop_builtin (enum insn_code icode
, tree arglist
, rtx target
)
6168 tree arg0
= TREE_VALUE (arglist
);
6169 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6170 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6171 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6173 if (icode
== CODE_FOR_nothing
)
6174 /* Builtin not supported on this processor. */
6177 /* If we got invalid arguments bail out before generating bad rtl. */
6178 if (arg0
== error_mark_node
)
6181 if (icode
== CODE_FOR_altivec_vspltisb
6182 || icode
== CODE_FOR_altivec_vspltish
6183 || icode
== CODE_FOR_altivec_vspltisw
6184 || icode
== CODE_FOR_spe_evsplatfi
6185 || icode
== CODE_FOR_spe_evsplati
)
6187 /* Only allow 5-bit *signed* literals. */
6188 if (GET_CODE (op0
) != CONST_INT
6189 || INTVAL (op0
) > 0x1f
6190 || INTVAL (op0
) < -0x1f)
6192 error ("argument 1 must be a 5-bit signed literal");
6198 || GET_MODE (target
) != tmode
6199 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6200 target
= gen_reg_rtx (tmode
);
6202 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6203 op0
= copy_to_mode_reg (mode0
, op0
);
6205 pat
= GEN_FCN (icode
) (target
, op0
);
6214 altivec_expand_abs_builtin (enum insn_code icode
, tree arglist
, rtx target
)
6216 rtx pat
, scratch1
, scratch2
;
6217 tree arg0
= TREE_VALUE (arglist
);
6218 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6219 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6220 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6222 /* If we have invalid arguments, bail out before generating bad rtl. */
6223 if (arg0
== error_mark_node
)
6227 || GET_MODE (target
) != tmode
6228 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6229 target
= gen_reg_rtx (tmode
);
6231 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6232 op0
= copy_to_mode_reg (mode0
, op0
);
6234 scratch1
= gen_reg_rtx (mode0
);
6235 scratch2
= gen_reg_rtx (mode0
);
6237 pat
= GEN_FCN (icode
) (target
, op0
, scratch1
, scratch2
);
6246 rs6000_expand_binop_builtin (enum insn_code icode
, tree arglist
, rtx target
)
6249 tree arg0
= TREE_VALUE (arglist
);
6250 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6251 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6252 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6253 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6254 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6255 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
6257 if (icode
== CODE_FOR_nothing
)
6258 /* Builtin not supported on this processor. */
6261 /* If we got invalid arguments bail out before generating bad rtl. */
6262 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
6265 if (icode
== CODE_FOR_altivec_vcfux
6266 || icode
== CODE_FOR_altivec_vcfsx
6267 || icode
== CODE_FOR_altivec_vctsxs
6268 || icode
== CODE_FOR_altivec_vctuxs
6269 || icode
== CODE_FOR_altivec_vspltb
6270 || icode
== CODE_FOR_altivec_vsplth
6271 || icode
== CODE_FOR_altivec_vspltw
6272 || icode
== CODE_FOR_spe_evaddiw
6273 || icode
== CODE_FOR_spe_evldd
6274 || icode
== CODE_FOR_spe_evldh
6275 || icode
== CODE_FOR_spe_evldw
6276 || icode
== CODE_FOR_spe_evlhhesplat
6277 || icode
== CODE_FOR_spe_evlhhossplat
6278 || icode
== CODE_FOR_spe_evlhhousplat
6279 || icode
== CODE_FOR_spe_evlwhe
6280 || icode
== CODE_FOR_spe_evlwhos
6281 || icode
== CODE_FOR_spe_evlwhou
6282 || icode
== CODE_FOR_spe_evlwhsplat
6283 || icode
== CODE_FOR_spe_evlwwsplat
6284 || icode
== CODE_FOR_spe_evrlwi
6285 || icode
== CODE_FOR_spe_evslwi
6286 || icode
== CODE_FOR_spe_evsrwis
6287 || icode
== CODE_FOR_spe_evsubifw
6288 || icode
== CODE_FOR_spe_evsrwiu
)
6290 /* Only allow 5-bit unsigned literals. */
6292 if (TREE_CODE (arg1
) != INTEGER_CST
6293 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
6295 error ("argument 2 must be a 5-bit unsigned literal");
6301 || GET_MODE (target
) != tmode
6302 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6303 target
= gen_reg_rtx (tmode
);
6305 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6306 op0
= copy_to_mode_reg (mode0
, op0
);
6307 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
6308 op1
= copy_to_mode_reg (mode1
, op1
);
6310 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
6319 altivec_expand_predicate_builtin (enum insn_code icode
, const char *opcode
,
6320 tree arglist
, rtx target
)
6323 tree cr6_form
= TREE_VALUE (arglist
);
6324 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
6325 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6326 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6327 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6328 enum machine_mode tmode
= SImode
;
6329 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6330 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
6333 if (TREE_CODE (cr6_form
) != INTEGER_CST
)
6335 error ("argument 1 of __builtin_altivec_predicate must be a constant");
6339 cr6_form_int
= TREE_INT_CST_LOW (cr6_form
);
6344 /* If we have invalid arguments, bail out before generating bad rtl. */
6345 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
6349 || GET_MODE (target
) != tmode
6350 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6351 target
= gen_reg_rtx (tmode
);
6353 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6354 op0
= copy_to_mode_reg (mode0
, op0
);
6355 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
6356 op1
= copy_to_mode_reg (mode1
, op1
);
6358 scratch
= gen_reg_rtx (mode0
);
6360 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
,
6361 gen_rtx_SYMBOL_REF (Pmode
, opcode
));
6366 /* The vec_any* and vec_all* predicates use the same opcodes for two
6367 different operations, but the bits in CR6 will be different
6368 depending on what information we want. So we have to play tricks
6369 with CR6 to get the right bits out.
6371 If you think this is disgusting, look at the specs for the
6372 AltiVec predicates. */
6374 switch (cr6_form_int
)
6377 emit_insn (gen_cr6_test_for_zero (target
));
6380 emit_insn (gen_cr6_test_for_zero_reverse (target
));
6383 emit_insn (gen_cr6_test_for_lt (target
));
6386 emit_insn (gen_cr6_test_for_lt_reverse (target
));
6389 error ("argument 1 of __builtin_altivec_predicate is out of range");
6397 altivec_expand_lv_builtin (enum insn_code icode
, tree arglist
, rtx target
)
6400 tree arg0
= TREE_VALUE (arglist
);
6401 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6402 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6403 enum machine_mode mode0
= Pmode
;
6404 enum machine_mode mode1
= Pmode
;
6405 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6406 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6408 if (icode
== CODE_FOR_nothing
)
6409 /* Builtin not supported on this processor. */
6412 /* If we got invalid arguments bail out before generating bad rtl. */
6413 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
6417 || GET_MODE (target
) != tmode
6418 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6419 target
= gen_reg_rtx (tmode
);
6421 op1
= copy_to_mode_reg (mode1
, op1
);
6423 if (op0
== const0_rtx
)
6425 addr
= gen_rtx_MEM (tmode
, op1
);
6429 op0
= copy_to_mode_reg (mode0
, op0
);
6430 addr
= gen_rtx_MEM (tmode
, gen_rtx_PLUS (Pmode
, op0
, op1
));
6433 pat
= GEN_FCN (icode
) (target
, addr
);
6443 spe_expand_stv_builtin (enum insn_code icode
, tree arglist
)
6445 tree arg0
= TREE_VALUE (arglist
);
6446 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6447 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6448 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6449 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6450 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
6452 enum machine_mode mode0
= insn_data
[icode
].operand
[0].mode
;
6453 enum machine_mode mode1
= insn_data
[icode
].operand
[1].mode
;
6454 enum machine_mode mode2
= insn_data
[icode
].operand
[2].mode
;
6456 /* Invalid arguments. Bail before doing anything stoopid! */
6457 if (arg0
== error_mark_node
6458 || arg1
== error_mark_node
6459 || arg2
== error_mark_node
)
6462 if (! (*insn_data
[icode
].operand
[2].predicate
) (op0
, mode2
))
6463 op0
= copy_to_mode_reg (mode2
, op0
);
6464 if (! (*insn_data
[icode
].operand
[0].predicate
) (op1
, mode0
))
6465 op1
= copy_to_mode_reg (mode0
, op1
);
6466 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
6467 op2
= copy_to_mode_reg (mode1
, op2
);
6469 pat
= GEN_FCN (icode
) (op1
, op2
, op0
);
6476 altivec_expand_stv_builtin (enum insn_code icode
, tree arglist
)
6478 tree arg0
= TREE_VALUE (arglist
);
6479 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6480 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6481 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6482 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6483 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
6485 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6486 enum machine_mode mode1
= Pmode
;
6487 enum machine_mode mode2
= Pmode
;
6489 /* Invalid arguments. Bail before doing anything stoopid! */
6490 if (arg0
== error_mark_node
6491 || arg1
== error_mark_node
6492 || arg2
== error_mark_node
)
6495 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, tmode
))
6496 op0
= copy_to_mode_reg (tmode
, op0
);
6498 op2
= copy_to_mode_reg (mode2
, op2
);
6500 if (op1
== const0_rtx
)
6502 addr
= gen_rtx_MEM (tmode
, op2
);
6506 op1
= copy_to_mode_reg (mode1
, op1
);
6507 addr
= gen_rtx_MEM (tmode
, gen_rtx_PLUS (Pmode
, op1
, op2
));
6510 pat
= GEN_FCN (icode
) (addr
, op0
);
6517 rs6000_expand_ternop_builtin (enum insn_code icode
, tree arglist
, rtx target
)
6520 tree arg0
= TREE_VALUE (arglist
);
6521 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6522 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6523 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6524 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6525 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
6526 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6527 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6528 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
6529 enum machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
6531 if (icode
== CODE_FOR_nothing
)
6532 /* Builtin not supported on this processor. */
6535 /* If we got invalid arguments bail out before generating bad rtl. */
6536 if (arg0
== error_mark_node
6537 || arg1
== error_mark_node
6538 || arg2
== error_mark_node
)
6541 if (icode
== CODE_FOR_altivec_vsldoi_4sf
6542 || icode
== CODE_FOR_altivec_vsldoi_4si
6543 || icode
== CODE_FOR_altivec_vsldoi_8hi
6544 || icode
== CODE_FOR_altivec_vsldoi_16qi
)
6546 /* Only allow 4-bit unsigned literals. */
6548 if (TREE_CODE (arg2
) != INTEGER_CST
6549 || TREE_INT_CST_LOW (arg2
) & ~0xf)
6551 error ("argument 3 must be a 4-bit unsigned literal");
6557 || GET_MODE (target
) != tmode
6558 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6559 target
= gen_reg_rtx (tmode
);
6561 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6562 op0
= copy_to_mode_reg (mode0
, op0
);
6563 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
6564 op1
= copy_to_mode_reg (mode1
, op1
);
6565 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
6566 op2
= copy_to_mode_reg (mode2
, op2
);
6568 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
6576 /* Expand the lvx builtins. */
6578 altivec_expand_ld_builtin (tree exp
, rtx target
, bool *expandedp
)
6580 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6581 tree arglist
= TREE_OPERAND (exp
, 1);
6582 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
6584 enum machine_mode tmode
, mode0
;
6586 enum insn_code icode
;
6590 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi
:
6591 icode
= CODE_FOR_altivec_lvx_16qi
;
6593 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi
:
6594 icode
= CODE_FOR_altivec_lvx_8hi
;
6596 case ALTIVEC_BUILTIN_LD_INTERNAL_4si
:
6597 icode
= CODE_FOR_altivec_lvx_4si
;
6599 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf
:
6600 icode
= CODE_FOR_altivec_lvx_4sf
;
6609 arg0
= TREE_VALUE (arglist
);
6610 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6611 tmode
= insn_data
[icode
].operand
[0].mode
;
6612 mode0
= insn_data
[icode
].operand
[1].mode
;
6615 || GET_MODE (target
) != tmode
6616 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6617 target
= gen_reg_rtx (tmode
);
6619 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6620 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
6622 pat
= GEN_FCN (icode
) (target
, op0
);
6629 /* Expand the stvx builtins. */
6631 altivec_expand_st_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
6634 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6635 tree arglist
= TREE_OPERAND (exp
, 1);
6636 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
6638 enum machine_mode mode0
, mode1
;
6640 enum insn_code icode
;
6644 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi
:
6645 icode
= CODE_FOR_altivec_stvx_16qi
;
6647 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi
:
6648 icode
= CODE_FOR_altivec_stvx_8hi
;
6650 case ALTIVEC_BUILTIN_ST_INTERNAL_4si
:
6651 icode
= CODE_FOR_altivec_stvx_4si
;
6653 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf
:
6654 icode
= CODE_FOR_altivec_stvx_4sf
;
6661 arg0
= TREE_VALUE (arglist
);
6662 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6663 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6664 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6665 mode0
= insn_data
[icode
].operand
[0].mode
;
6666 mode1
= insn_data
[icode
].operand
[1].mode
;
6668 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
6669 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
6670 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
6671 op1
= copy_to_mode_reg (mode1
, op1
);
6673 pat
= GEN_FCN (icode
) (op0
, op1
);
6681 /* Expand the dst builtins. */
6683 altivec_expand_dst_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
6686 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6687 tree arglist
= TREE_OPERAND (exp
, 1);
6688 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
6689 tree arg0
, arg1
, arg2
;
6690 enum machine_mode mode0
, mode1
, mode2
;
6691 rtx pat
, op0
, op1
, op2
;
6692 struct builtin_description
*d
;
6697 /* Handle DST variants. */
6698 d
= (struct builtin_description
*) bdesc_dst
;
6699 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
6700 if (d
->code
== fcode
)
6702 arg0
= TREE_VALUE (arglist
);
6703 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
6704 arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6705 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6706 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
6707 op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
6708 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
6709 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
6710 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
6712 /* Invalid arguments, bail out before generating bad rtl. */
6713 if (arg0
== error_mark_node
6714 || arg1
== error_mark_node
6715 || arg2
== error_mark_node
)
6720 if (TREE_CODE (arg2
) != INTEGER_CST
6721 || TREE_INT_CST_LOW (arg2
) & ~0x3)
6723 error ("argument to `%s' must be a 2-bit unsigned literal", d
->name
);
6727 if (! (*insn_data
[d
->icode
].operand
[0].predicate
) (op0
, mode0
))
6728 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
6729 if (! (*insn_data
[d
->icode
].operand
[1].predicate
) (op1
, mode1
))
6730 op1
= copy_to_mode_reg (mode1
, op1
);
6732 pat
= GEN_FCN (d
->icode
) (op0
, op1
, op2
);
6742 /* Expand the builtin in EXP and store the result in TARGET. Store
6743 true in *EXPANDEDP if we found a builtin to expand. */
6745 altivec_expand_builtin (tree exp
, rtx target
, bool *expandedp
)
6747 struct builtin_description
*d
;
6748 struct builtin_description_predicates
*dp
;
6750 enum insn_code icode
;
6751 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6752 tree arglist
= TREE_OPERAND (exp
, 1);
6755 enum machine_mode tmode
, mode0
;
6756 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
6758 target
= altivec_expand_ld_builtin (exp
, target
, expandedp
);
6762 target
= altivec_expand_st_builtin (exp
, target
, expandedp
);
6766 target
= altivec_expand_dst_builtin (exp
, target
, expandedp
);
6774 case ALTIVEC_BUILTIN_STVX
:
6775 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx
, arglist
);
6776 case ALTIVEC_BUILTIN_STVEBX
:
6777 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx
, arglist
);
6778 case ALTIVEC_BUILTIN_STVEHX
:
6779 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx
, arglist
);
6780 case ALTIVEC_BUILTIN_STVEWX
:
6781 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx
, arglist
);
6782 case ALTIVEC_BUILTIN_STVXL
:
6783 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl
, arglist
);
6785 case ALTIVEC_BUILTIN_MFVSCR
:
6786 icode
= CODE_FOR_altivec_mfvscr
;
6787 tmode
= insn_data
[icode
].operand
[0].mode
;
6790 || GET_MODE (target
) != tmode
6791 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6792 target
= gen_reg_rtx (tmode
);
6794 pat
= GEN_FCN (icode
) (target
);
6800 case ALTIVEC_BUILTIN_MTVSCR
:
6801 icode
= CODE_FOR_altivec_mtvscr
;
6802 arg0
= TREE_VALUE (arglist
);
6803 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6804 mode0
= insn_data
[icode
].operand
[0].mode
;
6806 /* If we got invalid arguments bail out before generating bad rtl. */
6807 if (arg0
== error_mark_node
)
6810 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
6811 op0
= copy_to_mode_reg (mode0
, op0
);
6813 pat
= GEN_FCN (icode
) (op0
);
6818 case ALTIVEC_BUILTIN_DSSALL
:
6819 emit_insn (gen_altivec_dssall ());
6822 case ALTIVEC_BUILTIN_DSS
:
6823 icode
= CODE_FOR_altivec_dss
;
6824 arg0
= TREE_VALUE (arglist
);
6826 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
6827 mode0
= insn_data
[icode
].operand
[0].mode
;
6829 /* If we got invalid arguments bail out before generating bad rtl. */
6830 if (arg0
== error_mark_node
)
6833 if (TREE_CODE (arg0
) != INTEGER_CST
6834 || TREE_INT_CST_LOW (arg0
) & ~0x3)
6836 error ("argument to dss must be a 2-bit unsigned literal");
6840 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
6841 op0
= copy_to_mode_reg (mode0
, op0
);
6843 emit_insn (gen_altivec_dss (op0
));
6846 case ALTIVEC_BUILTIN_COMPILETIME_ERROR
:
6847 arg0
= TREE_VALUE (arglist
);
6848 while (TREE_CODE (arg0
) == NOP_EXPR
|| TREE_CODE (arg0
) == ADDR_EXPR
6849 || TREE_CODE (arg0
) == ARRAY_REF
)
6850 arg0
= TREE_OPERAND (arg0
, 0);
6851 error ("invalid parameter combination for `%s' AltiVec intrinsic",
6852 TREE_STRING_POINTER (arg0
));
6857 /* Expand abs* operations. */
6858 d
= (struct builtin_description
*) bdesc_abs
;
6859 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
6860 if (d
->code
== fcode
)
6861 return altivec_expand_abs_builtin (d
->icode
, arglist
, target
);
6863 /* Expand the AltiVec predicates. */
6864 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
6865 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
6866 if (dp
->code
== fcode
)
6867 return altivec_expand_predicate_builtin (dp
->icode
, dp
->opcode
, arglist
, target
);
6869 /* LV* are funky. We initialized them differently. */
6872 case ALTIVEC_BUILTIN_LVSL
:
6873 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl
,
6875 case ALTIVEC_BUILTIN_LVSR
:
6876 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr
,
6878 case ALTIVEC_BUILTIN_LVEBX
:
6879 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx
,
6881 case ALTIVEC_BUILTIN_LVEHX
:
6882 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx
,
6884 case ALTIVEC_BUILTIN_LVEWX
:
6885 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx
,
6887 case ALTIVEC_BUILTIN_LVXL
:
6888 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl
,
6890 case ALTIVEC_BUILTIN_LVX
:
6891 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx
,
6902 /* Binops that need to be initialized manually, but can be expanded
6903 automagically by rs6000_expand_binop_builtin. */
6904 static struct builtin_description bdesc_2arg_spe
[] =
6906 { 0, CODE_FOR_spe_evlddx
, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX
},
6907 { 0, CODE_FOR_spe_evldwx
, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX
},
6908 { 0, CODE_FOR_spe_evldhx
, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX
},
6909 { 0, CODE_FOR_spe_evlwhex
, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX
},
6910 { 0, CODE_FOR_spe_evlwhoux
, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX
},
6911 { 0, CODE_FOR_spe_evlwhosx
, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX
},
6912 { 0, CODE_FOR_spe_evlwwsplatx
, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX
},
6913 { 0, CODE_FOR_spe_evlwhsplatx
, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX
},
6914 { 0, CODE_FOR_spe_evlhhesplatx
, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX
},
6915 { 0, CODE_FOR_spe_evlhhousplatx
, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX
},
6916 { 0, CODE_FOR_spe_evlhhossplatx
, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX
},
6917 { 0, CODE_FOR_spe_evldd
, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD
},
6918 { 0, CODE_FOR_spe_evldw
, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW
},
6919 { 0, CODE_FOR_spe_evldh
, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH
},
6920 { 0, CODE_FOR_spe_evlwhe
, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE
},
6921 { 0, CODE_FOR_spe_evlwhou
, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU
},
6922 { 0, CODE_FOR_spe_evlwhos
, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS
},
6923 { 0, CODE_FOR_spe_evlwwsplat
, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT
},
6924 { 0, CODE_FOR_spe_evlwhsplat
, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT
},
6925 { 0, CODE_FOR_spe_evlhhesplat
, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT
},
6926 { 0, CODE_FOR_spe_evlhhousplat
, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT
},
6927 { 0, CODE_FOR_spe_evlhhossplat
, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT
}
6930 /* Expand the builtin in EXP and store the result in TARGET. Store
6931 true in *EXPANDEDP if we found a builtin to expand.
6933 This expands the SPE builtins that are not simple unary and binary
6936 spe_expand_builtin (tree exp
, rtx target
, bool *expandedp
)
6938 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6939 tree arglist
= TREE_OPERAND (exp
, 1);
6941 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
6942 enum insn_code icode
;
6943 enum machine_mode tmode
, mode0
;
6945 struct builtin_description
*d
;
6950 /* Syntax check for a 5-bit unsigned immediate. */
6953 case SPE_BUILTIN_EVSTDD
:
6954 case SPE_BUILTIN_EVSTDH
:
6955 case SPE_BUILTIN_EVSTDW
:
6956 case SPE_BUILTIN_EVSTWHE
:
6957 case SPE_BUILTIN_EVSTWHO
:
6958 case SPE_BUILTIN_EVSTWWE
:
6959 case SPE_BUILTIN_EVSTWWO
:
6960 arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
6961 if (TREE_CODE (arg1
) != INTEGER_CST
6962 || TREE_INT_CST_LOW (arg1
) & ~0x1f)
6964 error ("argument 2 must be a 5-bit unsigned literal");
6972 /* The evsplat*i instructions are not quite generic. */
6975 case SPE_BUILTIN_EVSPLATFI
:
6976 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi
,
6978 case SPE_BUILTIN_EVSPLATI
:
6979 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati
,
6985 d
= (struct builtin_description
*) bdesc_2arg_spe
;
6986 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg_spe
); ++i
, ++d
)
6987 if (d
->code
== fcode
)
6988 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
6990 d
= (struct builtin_description
*) bdesc_spe_predicates
;
6991 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, ++d
)
6992 if (d
->code
== fcode
)
6993 return spe_expand_predicate_builtin (d
->icode
, arglist
, target
);
6995 d
= (struct builtin_description
*) bdesc_spe_evsel
;
6996 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, ++d
)
6997 if (d
->code
== fcode
)
6998 return spe_expand_evsel_builtin (d
->icode
, arglist
, target
);
7002 case SPE_BUILTIN_EVSTDDX
:
7003 return spe_expand_stv_builtin (CODE_FOR_spe_evstddx
, arglist
);
7004 case SPE_BUILTIN_EVSTDHX
:
7005 return spe_expand_stv_builtin (CODE_FOR_spe_evstdhx
, arglist
);
7006 case SPE_BUILTIN_EVSTDWX
:
7007 return spe_expand_stv_builtin (CODE_FOR_spe_evstdwx
, arglist
);
7008 case SPE_BUILTIN_EVSTWHEX
:
7009 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhex
, arglist
);
7010 case SPE_BUILTIN_EVSTWHOX
:
7011 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhox
, arglist
);
7012 case SPE_BUILTIN_EVSTWWEX
:
7013 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwex
, arglist
);
7014 case SPE_BUILTIN_EVSTWWOX
:
7015 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwox
, arglist
);
7016 case SPE_BUILTIN_EVSTDD
:
7017 return spe_expand_stv_builtin (CODE_FOR_spe_evstdd
, arglist
);
7018 case SPE_BUILTIN_EVSTDH
:
7019 return spe_expand_stv_builtin (CODE_FOR_spe_evstdh
, arglist
);
7020 case SPE_BUILTIN_EVSTDW
:
7021 return spe_expand_stv_builtin (CODE_FOR_spe_evstdw
, arglist
);
7022 case SPE_BUILTIN_EVSTWHE
:
7023 return spe_expand_stv_builtin (CODE_FOR_spe_evstwhe
, arglist
);
7024 case SPE_BUILTIN_EVSTWHO
:
7025 return spe_expand_stv_builtin (CODE_FOR_spe_evstwho
, arglist
);
7026 case SPE_BUILTIN_EVSTWWE
:
7027 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwe
, arglist
);
7028 case SPE_BUILTIN_EVSTWWO
:
7029 return spe_expand_stv_builtin (CODE_FOR_spe_evstwwo
, arglist
);
7030 case SPE_BUILTIN_MFSPEFSCR
:
7031 icode
= CODE_FOR_spe_mfspefscr
;
7032 tmode
= insn_data
[icode
].operand
[0].mode
;
7035 || GET_MODE (target
) != tmode
7036 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
7037 target
= gen_reg_rtx (tmode
);
7039 pat
= GEN_FCN (icode
) (target
);
7044 case SPE_BUILTIN_MTSPEFSCR
:
7045 icode
= CODE_FOR_spe_mtspefscr
;
7046 arg0
= TREE_VALUE (arglist
);
7047 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
7048 mode0
= insn_data
[icode
].operand
[0].mode
;
7050 if (arg0
== error_mark_node
)
7053 if (! (*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
7054 op0
= copy_to_mode_reg (mode0
, op0
);
7056 pat
= GEN_FCN (icode
) (op0
);
7069 spe_expand_predicate_builtin (enum insn_code icode
, tree arglist
, rtx target
)
7071 rtx pat
, scratch
, tmp
;
7072 tree form
= TREE_VALUE (arglist
);
7073 tree arg0
= TREE_VALUE (TREE_CHAIN (arglist
));
7074 tree arg1
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
7075 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
7076 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
7077 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
7078 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
7082 if (TREE_CODE (form
) != INTEGER_CST
)
7084 error ("argument 1 of __builtin_spe_predicate must be a constant");
7088 form_int
= TREE_INT_CST_LOW (form
);
7093 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
7097 || GET_MODE (target
) != SImode
7098 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, SImode
))
7099 target
= gen_reg_rtx (SImode
);
7101 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
7102 op0
= copy_to_mode_reg (mode0
, op0
);
7103 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
7104 op1
= copy_to_mode_reg (mode1
, op1
);
7106 scratch
= gen_reg_rtx (CCmode
);
7108 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
7113 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
7114 _lower_. We use one compare, but look in different bits of the
7115 CR for each variant.
7117 There are 2 elements in each SPE simd type (upper/lower). The CR
7118 bits are set as follows:
7120 BIT0 | BIT 1 | BIT 2 | BIT 3
7121 U | L | (U | L) | (U & L)
7123 So, for an "all" relationship, BIT 3 would be set.
7124 For an "any" relationship, BIT 2 would be set. Etc.
7126 Following traditional nomenclature, these bits map to:
7128 BIT0 | BIT 1 | BIT 2 | BIT 3
7131 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
7136 /* All variant. OV bit. */
7138 /* We need to get to the OV bit, which is the ORDERED bit. We
7139 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
7140 that's ugly and will trigger a validate_condition_mode abort.
7141 So let's just use another pattern. */
7142 emit_insn (gen_move_from_CR_ov_bit (target
, scratch
));
7144 /* Any variant. EQ bit. */
7148 /* Upper variant. LT bit. */
7152 /* Lower variant. GT bit. */
7157 error ("argument 1 of __builtin_spe_predicate is out of range");
7161 tmp
= gen_rtx_fmt_ee (code
, SImode
, scratch
, const0_rtx
);
7162 emit_move_insn (target
, tmp
);
7167 /* The evsel builtins look like this:
7169 e = __builtin_spe_evsel_OP (a, b, c, d);
7173 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
7174 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
7178 spe_expand_evsel_builtin (enum insn_code icode
, tree arglist
, rtx target
)
7181 tree arg0
= TREE_VALUE (arglist
);
7182 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
7183 tree arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
7184 tree arg3
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist
))));
7185 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
7186 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
7187 rtx op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
7188 rtx op3
= expand_expr (arg3
, NULL_RTX
, VOIDmode
, 0);
7189 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
7190 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
7195 if (arg0
== error_mark_node
|| arg1
== error_mark_node
7196 || arg2
== error_mark_node
|| arg3
== error_mark_node
)
7200 || GET_MODE (target
) != mode0
7201 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, mode0
))
7202 target
= gen_reg_rtx (mode0
);
7204 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
7205 op0
= copy_to_mode_reg (mode0
, op0
);
7206 if (! (*insn_data
[icode
].operand
[1].predicate
) (op1
, mode1
))
7207 op1
= copy_to_mode_reg (mode0
, op1
);
7208 if (! (*insn_data
[icode
].operand
[1].predicate
) (op2
, mode1
))
7209 op2
= copy_to_mode_reg (mode0
, op2
);
7210 if (! (*insn_data
[icode
].operand
[1].predicate
) (op3
, mode1
))
7211 op3
= copy_to_mode_reg (mode0
, op3
);
7213 /* Generate the compare. */
7214 scratch
= gen_reg_rtx (CCmode
);
7215 pat
= GEN_FCN (icode
) (scratch
, op0
, op1
);
7220 if (mode0
== V2SImode
)
7221 emit_insn (gen_spe_evsel (target
, op2
, op3
, scratch
));
7223 emit_insn (gen_spe_evsel_fs (target
, op2
, op3
, scratch
));
7228 /* Expand an expression EXP that calls a built-in function,
7229 with result going to TARGET if that's convenient
7230 (and in mode MODE if that's convenient).
7231 SUBTARGET may be used as the target for computing one of EXP's operands.
7232 IGNORE is nonzero if the value is to be ignored. */
7235 rs6000_expand_builtin (tree exp
, rtx target
, rtx subtarget ATTRIBUTE_UNUSED
,
7236 enum machine_mode mode ATTRIBUTE_UNUSED
,
7237 int ignore ATTRIBUTE_UNUSED
)
7239 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7240 tree arglist
= TREE_OPERAND (exp
, 1);
7241 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
7242 struct builtin_description
*d
;
7247 if (fcode
== ALTIVEC_BUILTIN_MASK_FOR_LOAD
7248 || fcode
== ALTIVEC_BUILTIN_MASK_FOR_STORE
)
7250 int icode
= (int) CODE_FOR_altivec_lvsr
;
7251 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
7252 enum machine_mode mode
= insn_data
[icode
].operand
[1].mode
;
7256 if (!TARGET_ALTIVEC
)
7259 arg
= TREE_VALUE (arglist
);
7260 if (TREE_CODE (TREE_TYPE (arg
)) != POINTER_TYPE
)
7262 op
= expand_expr (arg
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
7263 addr
= memory_address (mode
, op
);
7264 if (fcode
== ALTIVEC_BUILTIN_MASK_FOR_STORE
)
7268 /* For the load case need to negate the address. */
7269 op
= gen_reg_rtx (GET_MODE (addr
));
7270 emit_insn (gen_rtx_SET (VOIDmode
, op
,
7271 gen_rtx_NEG (GET_MODE (addr
), addr
)));
7273 op
= gen_rtx_MEM (mode
, op
);
7276 || GET_MODE (target
) != tmode
7277 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
7278 target
= gen_reg_rtx (tmode
);
7280 /*pat = gen_altivec_lvsr (target, op);*/
7281 pat
= GEN_FCN (icode
) (target
, op
);
7291 ret
= altivec_expand_builtin (exp
, target
, &success
);
7298 ret
= spe_expand_builtin (exp
, target
, &success
);
7304 if (TARGET_ALTIVEC
|| TARGET_SPE
)
7306 /* Handle simple unary operations. */
7307 d
= (struct builtin_description
*) bdesc_1arg
;
7308 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
7309 if (d
->code
== fcode
)
7310 return rs6000_expand_unop_builtin (d
->icode
, arglist
, target
);
7312 /* Handle simple binary operations. */
7313 d
= (struct builtin_description
*) bdesc_2arg
;
7314 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
7315 if (d
->code
== fcode
)
7316 return rs6000_expand_binop_builtin (d
->icode
, arglist
, target
);
7318 /* Handle simple ternary operations. */
7319 d
= (struct builtin_description
*) bdesc_3arg
;
7320 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
7321 if (d
->code
== fcode
)
7322 return rs6000_expand_ternop_builtin (d
->icode
, arglist
, target
);
7330 build_opaque_vector_type (tree node
, int nunits
)
7332 node
= copy_node (node
);
7333 TYPE_MAIN_VARIANT (node
) = node
;
7334 return build_vector_type (node
, nunits
);
7338 rs6000_init_builtins (void)
7340 V2SI_type_node
= build_vector_type (intSI_type_node
, 2);
7341 V2SF_type_node
= build_vector_type (float_type_node
, 2);
7342 V4HI_type_node
= build_vector_type (intHI_type_node
, 4);
7343 V4SI_type_node
= build_vector_type (intSI_type_node
, 4);
7344 V4SF_type_node
= build_vector_type (float_type_node
, 4);
7345 V8HI_type_node
= build_vector_type (intHI_type_node
, 8);
7346 V16QI_type_node
= build_vector_type (intQI_type_node
, 16);
7348 unsigned_V16QI_type_node
= build_vector_type (unsigned_intQI_type_node
, 16);
7349 unsigned_V8HI_type_node
= build_vector_type (unsigned_intHI_type_node
, 8);
7350 unsigned_V4SI_type_node
= build_vector_type (unsigned_intSI_type_node
, 4);
7352 opaque_V2SF_type_node
= build_opaque_vector_type (float_type_node
, 2);
7353 opaque_V2SI_type_node
= build_opaque_vector_type (intSI_type_node
, 2);
7354 opaque_p_V2SI_type_node
= build_pointer_type (opaque_V2SI_type_node
);
7356 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
7357 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
7358 'vector unsigned short'. */
7360 bool_char_type_node
= build_distinct_type_copy (unsigned_intQI_type_node
);
7361 bool_short_type_node
= build_distinct_type_copy (unsigned_intHI_type_node
);
7362 bool_int_type_node
= build_distinct_type_copy (unsigned_intSI_type_node
);
7363 pixel_type_node
= build_distinct_type_copy (unsigned_intHI_type_node
);
7365 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7366 get_identifier ("__bool char"),
7367 bool_char_type_node
));
7368 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7369 get_identifier ("__bool short"),
7370 bool_short_type_node
));
7371 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7372 get_identifier ("__bool int"),
7373 bool_int_type_node
));
7374 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7375 get_identifier ("__pixel"),
7378 bool_V16QI_type_node
= build_vector_type (bool_char_type_node
, 16);
7379 bool_V8HI_type_node
= build_vector_type (bool_short_type_node
, 8);
7380 bool_V4SI_type_node
= build_vector_type (bool_int_type_node
, 4);
7381 pixel_V8HI_type_node
= build_vector_type (pixel_type_node
, 8);
7383 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7384 get_identifier ("__vector unsigned char"),
7385 unsigned_V16QI_type_node
));
7386 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7387 get_identifier ("__vector signed char"),
7389 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7390 get_identifier ("__vector __bool char"),
7391 bool_V16QI_type_node
));
7393 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7394 get_identifier ("__vector unsigned short"),
7395 unsigned_V8HI_type_node
));
7396 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7397 get_identifier ("__vector signed short"),
7399 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7400 get_identifier ("__vector __bool short"),
7401 bool_V8HI_type_node
));
7403 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7404 get_identifier ("__vector unsigned int"),
7405 unsigned_V4SI_type_node
));
7406 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7407 get_identifier ("__vector signed int"),
7409 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7410 get_identifier ("__vector __bool int"),
7411 bool_V4SI_type_node
));
7413 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7414 get_identifier ("__vector float"),
7416 (*lang_hooks
.decls
.pushdecl
) (build_decl (TYPE_DECL
,
7417 get_identifier ("__vector __pixel"),
7418 pixel_V8HI_type_node
));
7421 spe_init_builtins ();
7423 altivec_init_builtins ();
7424 if (TARGET_ALTIVEC
|| TARGET_SPE
)
7425 rs6000_common_init_builtins ();
7428 /* Search through a set of builtins and enable the mask bits.
7429 DESC is an array of builtins.
7430 SIZE is the total number of builtins.
7431 START is the builtin enum at which to start.
7432 END is the builtin enum at which to end. */
7434 enable_mask_for_builtins (struct builtin_description
*desc
, int size
,
7435 enum rs6000_builtins start
,
7436 enum rs6000_builtins end
)
7440 for (i
= 0; i
< size
; ++i
)
7441 if (desc
[i
].code
== start
)
7447 for (; i
< size
; ++i
)
7449 /* Flip all the bits on. */
7450 desc
[i
].mask
= target_flags
;
7451 if (desc
[i
].code
== end
)
7457 spe_init_builtins (void)
7459 tree endlink
= void_list_node
;
7460 tree puint_type_node
= build_pointer_type (unsigned_type_node
);
7461 tree pushort_type_node
= build_pointer_type (short_unsigned_type_node
);
7462 struct builtin_description
*d
;
7465 tree v2si_ftype_4_v2si
7466 = build_function_type
7467 (opaque_V2SI_type_node
,
7468 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7469 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7470 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7471 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7474 tree v2sf_ftype_4_v2sf
7475 = build_function_type
7476 (opaque_V2SF_type_node
,
7477 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
7478 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
7479 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
7480 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
7483 tree int_ftype_int_v2si_v2si
7484 = build_function_type
7486 tree_cons (NULL_TREE
, integer_type_node
,
7487 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7488 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7491 tree int_ftype_int_v2sf_v2sf
7492 = build_function_type
7494 tree_cons (NULL_TREE
, integer_type_node
,
7495 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
7496 tree_cons (NULL_TREE
, opaque_V2SF_type_node
,
7499 tree void_ftype_v2si_puint_int
7500 = build_function_type (void_type_node
,
7501 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7502 tree_cons (NULL_TREE
, puint_type_node
,
7503 tree_cons (NULL_TREE
,
7507 tree void_ftype_v2si_puint_char
7508 = build_function_type (void_type_node
,
7509 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7510 tree_cons (NULL_TREE
, puint_type_node
,
7511 tree_cons (NULL_TREE
,
7515 tree void_ftype_v2si_pv2si_int
7516 = build_function_type (void_type_node
,
7517 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7518 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
7519 tree_cons (NULL_TREE
,
7523 tree void_ftype_v2si_pv2si_char
7524 = build_function_type (void_type_node
,
7525 tree_cons (NULL_TREE
, opaque_V2SI_type_node
,
7526 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
7527 tree_cons (NULL_TREE
,
7532 = build_function_type (void_type_node
,
7533 tree_cons (NULL_TREE
, integer_type_node
, endlink
));
7536 = build_function_type (integer_type_node
, endlink
);
7538 tree v2si_ftype_pv2si_int
7539 = build_function_type (opaque_V2SI_type_node
,
7540 tree_cons (NULL_TREE
, opaque_p_V2SI_type_node
,
7541 tree_cons (NULL_TREE
, integer_type_node
,
7544 tree v2si_ftype_puint_int
7545 = build_function_type (opaque_V2SI_type_node
,
7546 tree_cons (NULL_TREE
, puint_type_node
,
7547 tree_cons (NULL_TREE
, integer_type_node
,
7550 tree v2si_ftype_pushort_int
7551 = build_function_type (opaque_V2SI_type_node
,
7552 tree_cons (NULL_TREE
, pushort_type_node
,
7553 tree_cons (NULL_TREE
, integer_type_node
,
7556 tree v2si_ftype_signed_char
7557 = build_function_type (opaque_V2SI_type_node
,
7558 tree_cons (NULL_TREE
, signed_char_type_node
,
7561 /* The initialization of the simple binary and unary builtins is
7562 done in rs6000_common_init_builtins, but we have to enable the
7563 mask bits here manually because we have run out of `target_flags'
7564 bits. We really need to redesign this mask business. */
7566 enable_mask_for_builtins ((struct builtin_description
*) bdesc_2arg
,
7567 ARRAY_SIZE (bdesc_2arg
),
7570 enable_mask_for_builtins ((struct builtin_description
*) bdesc_1arg
,
7571 ARRAY_SIZE (bdesc_1arg
),
7573 SPE_BUILTIN_EVSUBFUSIAAW
);
7574 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_predicates
,
7575 ARRAY_SIZE (bdesc_spe_predicates
),
7576 SPE_BUILTIN_EVCMPEQ
,
7577 SPE_BUILTIN_EVFSTSTLT
);
7578 enable_mask_for_builtins ((struct builtin_description
*) bdesc_spe_evsel
,
7579 ARRAY_SIZE (bdesc_spe_evsel
),
7580 SPE_BUILTIN_EVSEL_CMPGTS
,
7581 SPE_BUILTIN_EVSEL_FSTSTEQ
);
7583 (*lang_hooks
.decls
.pushdecl
)
7584 (build_decl (TYPE_DECL
, get_identifier ("__ev64_opaque__"),
7585 opaque_V2SI_type_node
));
7587 /* Initialize irregular SPE builtins. */
7589 def_builtin (target_flags
, "__builtin_spe_mtspefscr", void_ftype_int
, SPE_BUILTIN_MTSPEFSCR
);
7590 def_builtin (target_flags
, "__builtin_spe_mfspefscr", int_ftype_void
, SPE_BUILTIN_MFSPEFSCR
);
7591 def_builtin (target_flags
, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDDX
);
7592 def_builtin (target_flags
, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDHX
);
7593 def_builtin (target_flags
, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int
, SPE_BUILTIN_EVSTDWX
);
7594 def_builtin (target_flags
, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHEX
);
7595 def_builtin (target_flags
, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWHOX
);
7596 def_builtin (target_flags
, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWEX
);
7597 def_builtin (target_flags
, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int
, SPE_BUILTIN_EVSTWWOX
);
7598 def_builtin (target_flags
, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDD
);
7599 def_builtin (target_flags
, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDH
);
7600 def_builtin (target_flags
, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char
, SPE_BUILTIN_EVSTDW
);
7601 def_builtin (target_flags
, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHE
);
7602 def_builtin (target_flags
, "__builtin_spe_evstwho", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWHO
);
7603 def_builtin (target_flags
, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWE
);
7604 def_builtin (target_flags
, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char
, SPE_BUILTIN_EVSTWWO
);
7605 def_builtin (target_flags
, "__builtin_spe_evsplatfi", v2si_ftype_signed_char
, SPE_BUILTIN_EVSPLATFI
);
7606 def_builtin (target_flags
, "__builtin_spe_evsplati", v2si_ftype_signed_char
, SPE_BUILTIN_EVSPLATI
);
7609 def_builtin (target_flags
, "__builtin_spe_evlddx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDDX
);
7610 def_builtin (target_flags
, "__builtin_spe_evldwx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDWX
);
7611 def_builtin (target_flags
, "__builtin_spe_evldhx", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDHX
);
7612 def_builtin (target_flags
, "__builtin_spe_evlwhex", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHEX
);
7613 def_builtin (target_flags
, "__builtin_spe_evlwhoux", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOUX
);
7614 def_builtin (target_flags
, "__builtin_spe_evlwhosx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOSX
);
7615 def_builtin (target_flags
, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLATX
);
7616 def_builtin (target_flags
, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLATX
);
7617 def_builtin (target_flags
, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLATX
);
7618 def_builtin (target_flags
, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLATX
);
7619 def_builtin (target_flags
, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLATX
);
7620 def_builtin (target_flags
, "__builtin_spe_evldd", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDD
);
7621 def_builtin (target_flags
, "__builtin_spe_evldw", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDW
);
7622 def_builtin (target_flags
, "__builtin_spe_evldh", v2si_ftype_pv2si_int
, SPE_BUILTIN_EVLDH
);
7623 def_builtin (target_flags
, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHESPLAT
);
7624 def_builtin (target_flags
, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOSSPLAT
);
7625 def_builtin (target_flags
, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int
, SPE_BUILTIN_EVLHHOUSPLAT
);
7626 def_builtin (target_flags
, "__builtin_spe_evlwhe", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHE
);
7627 def_builtin (target_flags
, "__builtin_spe_evlwhos", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOS
);
7628 def_builtin (target_flags
, "__builtin_spe_evlwhou", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHOU
);
7629 def_builtin (target_flags
, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWHSPLAT
);
7630 def_builtin (target_flags
, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int
, SPE_BUILTIN_EVLWWSPLAT
);
7633 d
= (struct builtin_description
*) bdesc_spe_predicates
;
7634 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_predicates
); ++i
, d
++)
7638 switch (insn_data
[d
->icode
].operand
[1].mode
)
7641 type
= int_ftype_int_v2si_v2si
;
7644 type
= int_ftype_int_v2sf_v2sf
;
7650 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
7653 /* Evsel predicates. */
7654 d
= (struct builtin_description
*) bdesc_spe_evsel
;
7655 for (i
= 0; i
< ARRAY_SIZE (bdesc_spe_evsel
); ++i
, d
++)
7659 switch (insn_data
[d
->icode
].operand
[1].mode
)
7662 type
= v2si_ftype_4_v2si
;
7665 type
= v2sf_ftype_4_v2sf
;
7671 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
7676 altivec_init_builtins (void)
7678 struct builtin_description
*d
;
7679 struct builtin_description_predicates
*dp
;
7681 tree pfloat_type_node
= build_pointer_type (float_type_node
);
7682 tree pint_type_node
= build_pointer_type (integer_type_node
);
7683 tree pshort_type_node
= build_pointer_type (short_integer_type_node
);
7684 tree pchar_type_node
= build_pointer_type (char_type_node
);
7686 tree pvoid_type_node
= build_pointer_type (void_type_node
);
7688 tree pcfloat_type_node
= build_pointer_type (build_qualified_type (float_type_node
, TYPE_QUAL_CONST
));
7689 tree pcint_type_node
= build_pointer_type (build_qualified_type (integer_type_node
, TYPE_QUAL_CONST
));
7690 tree pcshort_type_node
= build_pointer_type (build_qualified_type (short_integer_type_node
, TYPE_QUAL_CONST
));
7691 tree pcchar_type_node
= build_pointer_type (build_qualified_type (char_type_node
, TYPE_QUAL_CONST
));
7693 tree pcvoid_type_node
= build_pointer_type (build_qualified_type (void_type_node
, TYPE_QUAL_CONST
));
7695 tree int_ftype_int_v4si_v4si
7696 = build_function_type_list (integer_type_node
,
7697 integer_type_node
, V4SI_type_node
,
7698 V4SI_type_node
, NULL_TREE
);
7699 tree v4sf_ftype_pcfloat
7700 = build_function_type_list (V4SF_type_node
, pcfloat_type_node
, NULL_TREE
);
7701 tree void_ftype_pfloat_v4sf
7702 = build_function_type_list (void_type_node
,
7703 pfloat_type_node
, V4SF_type_node
, NULL_TREE
);
7704 tree v4si_ftype_pcint
7705 = build_function_type_list (V4SI_type_node
, pcint_type_node
, NULL_TREE
);
7706 tree void_ftype_pint_v4si
7707 = build_function_type_list (void_type_node
,
7708 pint_type_node
, V4SI_type_node
, NULL_TREE
);
7709 tree v8hi_ftype_pcshort
7710 = build_function_type_list (V8HI_type_node
, pcshort_type_node
, NULL_TREE
);
7711 tree void_ftype_pshort_v8hi
7712 = build_function_type_list (void_type_node
,
7713 pshort_type_node
, V8HI_type_node
, NULL_TREE
);
7714 tree v16qi_ftype_pcchar
7715 = build_function_type_list (V16QI_type_node
, pcchar_type_node
, NULL_TREE
);
7716 tree void_ftype_pchar_v16qi
7717 = build_function_type_list (void_type_node
,
7718 pchar_type_node
, V16QI_type_node
, NULL_TREE
);
7719 tree void_ftype_v4si
7720 = build_function_type_list (void_type_node
, V4SI_type_node
, NULL_TREE
);
7721 tree v8hi_ftype_void
7722 = build_function_type (V8HI_type_node
, void_list_node
);
7723 tree void_ftype_void
7724 = build_function_type (void_type_node
, void_list_node
);
7726 = build_function_type_list (void_type_node
, integer_type_node
, NULL_TREE
);
7728 tree v16qi_ftype_long_pcvoid
7729 = build_function_type_list (V16QI_type_node
,
7730 long_integer_type_node
, pcvoid_type_node
, NULL_TREE
);
7731 tree v8hi_ftype_long_pcvoid
7732 = build_function_type_list (V8HI_type_node
,
7733 long_integer_type_node
, pcvoid_type_node
, NULL_TREE
);
7734 tree v4si_ftype_long_pcvoid
7735 = build_function_type_list (V4SI_type_node
,
7736 long_integer_type_node
, pcvoid_type_node
, NULL_TREE
);
7738 tree void_ftype_v4si_long_pvoid
7739 = build_function_type_list (void_type_node
,
7740 V4SI_type_node
, long_integer_type_node
,
7741 pvoid_type_node
, NULL_TREE
);
7742 tree void_ftype_v16qi_long_pvoid
7743 = build_function_type_list (void_type_node
,
7744 V16QI_type_node
, long_integer_type_node
,
7745 pvoid_type_node
, NULL_TREE
);
7746 tree void_ftype_v8hi_long_pvoid
7747 = build_function_type_list (void_type_node
,
7748 V8HI_type_node
, long_integer_type_node
,
7749 pvoid_type_node
, NULL_TREE
);
7750 tree int_ftype_int_v8hi_v8hi
7751 = build_function_type_list (integer_type_node
,
7752 integer_type_node
, V8HI_type_node
,
7753 V8HI_type_node
, NULL_TREE
);
7754 tree int_ftype_int_v16qi_v16qi
7755 = build_function_type_list (integer_type_node
,
7756 integer_type_node
, V16QI_type_node
,
7757 V16QI_type_node
, NULL_TREE
);
7758 tree int_ftype_int_v4sf_v4sf
7759 = build_function_type_list (integer_type_node
,
7760 integer_type_node
, V4SF_type_node
,
7761 V4SF_type_node
, NULL_TREE
);
7762 tree v4si_ftype_v4si
7763 = build_function_type_list (V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
7764 tree v8hi_ftype_v8hi
7765 = build_function_type_list (V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
7766 tree v16qi_ftype_v16qi
7767 = build_function_type_list (V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
7768 tree v4sf_ftype_v4sf
7769 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
7770 tree void_ftype_pcvoid_int_int
7771 = build_function_type_list (void_type_node
,
7772 pcvoid_type_node
, integer_type_node
,
7773 integer_type_node
, NULL_TREE
);
7774 tree int_ftype_pcchar
7775 = build_function_type_list (integer_type_node
,
7776 pcchar_type_node
, NULL_TREE
);
7781 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat
,
7782 ALTIVEC_BUILTIN_LD_INTERNAL_4sf
);
7783 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf
,
7784 ALTIVEC_BUILTIN_ST_INTERNAL_4sf
);
7785 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint
,
7786 ALTIVEC_BUILTIN_LD_INTERNAL_4si
);
7787 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si
,
7788 ALTIVEC_BUILTIN_ST_INTERNAL_4si
);
7789 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort
,
7790 ALTIVEC_BUILTIN_LD_INTERNAL_8hi
);
7791 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi
,
7792 ALTIVEC_BUILTIN_ST_INTERNAL_8hi
);
7793 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar
,
7794 ALTIVEC_BUILTIN_LD_INTERNAL_16qi
);
7795 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi
,
7796 ALTIVEC_BUILTIN_ST_INTERNAL_16qi
);
7797 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mtvscr", void_ftype_v4si
, ALTIVEC_BUILTIN_MTVSCR
);
7798 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_mfvscr", v8hi_ftype_void
, ALTIVEC_BUILTIN_MFVSCR
);
7799 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dssall", void_ftype_void
, ALTIVEC_BUILTIN_DSSALL
);
7800 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_dss", void_ftype_int
, ALTIVEC_BUILTIN_DSS
);
7801 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVSL
);
7802 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVSR
);
7803 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVEBX
);
7804 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVEHX
);
7805 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVEWX
);
7806 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVXL
);
7807 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid
, ALTIVEC_BUILTIN_LVX
);
7808 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid
, ALTIVEC_BUILTIN_STVX
);
7809 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid
, ALTIVEC_BUILTIN_STVEWX
);
7810 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid
, ALTIVEC_BUILTIN_STVXL
);
7811 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid
, ALTIVEC_BUILTIN_STVEBX
);
7812 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid
, ALTIVEC_BUILTIN_STVEHX
);
7814 /* See altivec.h for usage of "__builtin_altivec_compiletime_error". */
7815 def_builtin (MASK_ALTIVEC
, "__builtin_altivec_compiletime_error", int_ftype_pcchar
,
7816 ALTIVEC_BUILTIN_COMPILETIME_ERROR
);
7818 /* Add the DST variants. */
7819 d
= (struct builtin_description
*) bdesc_dst
;
7820 for (i
= 0; i
< ARRAY_SIZE (bdesc_dst
); i
++, d
++)
7821 def_builtin (d
->mask
, d
->name
, void_ftype_pcvoid_int_int
, d
->code
);
7823 /* Initialize the predicates. */
7824 dp
= (struct builtin_description_predicates
*) bdesc_altivec_preds
;
7825 for (i
= 0; i
< ARRAY_SIZE (bdesc_altivec_preds
); i
++, dp
++)
7827 enum machine_mode mode1
;
7830 mode1
= insn_data
[dp
->icode
].operand
[1].mode
;
7835 type
= int_ftype_int_v4si_v4si
;
7838 type
= int_ftype_int_v8hi_v8hi
;
7841 type
= int_ftype_int_v16qi_v16qi
;
7844 type
= int_ftype_int_v4sf_v4sf
;
7850 def_builtin (dp
->mask
, dp
->name
, type
, dp
->code
);
7853 /* Initialize the abs* operators. */
7854 d
= (struct builtin_description
*) bdesc_abs
;
7855 for (i
= 0; i
< ARRAY_SIZE (bdesc_abs
); i
++, d
++)
7857 enum machine_mode mode0
;
7860 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
7865 type
= v4si_ftype_v4si
;
7868 type
= v8hi_ftype_v8hi
;
7871 type
= v16qi_ftype_v16qi
;
7874 type
= v4sf_ftype_v4sf
;
7880 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
7883 /* Initialize target builtin that implements
7884 targetm.vectorize.builtin_mask_for_load. */
7885 id
= get_identifier ("__builtin_altivec_mask_for_load");
7886 decl
= build_decl (FUNCTION_DECL
, id
, v16qi_ftype_long_pcvoid
);
7887 DECL_BUILT_IN_CLASS (decl
) = BUILT_IN_MD
;
7888 DECL_FUNCTION_CODE (decl
) = ALTIVEC_BUILTIN_MASK_FOR_LOAD
;
7889 /* Record the decl. Will be used by rs6000_builtin_mask_for_load. */
7890 altivec_builtin_mask_for_load
= decl
;
7892 /* Initialize target builtin that implements
7893 targetm.vectorize.builtin_mask_for_store. */
7894 id
= get_identifier ("__builtin_altivec_mask_for_store");
7895 decl
= build_decl (FUNCTION_DECL
, id
, v16qi_ftype_long_pcvoid
);
7896 DECL_BUILT_IN_CLASS (decl
) = BUILT_IN_MD
;
7897 DECL_FUNCTION_CODE (decl
) = ALTIVEC_BUILTIN_MASK_FOR_STORE
;
7898 /* Record the decl. Will be used by rs6000_builtin_mask_for_store. */
7899 altivec_builtin_mask_for_store
= decl
;
/* Register the built-in functions shared by the vector units: build
   tree type signatures out of the vector type nodes, then walk the
   bdesc_3arg / bdesc_2arg / bdesc_1arg descriptor tables, pick the
   signature for each entry from its insn pattern's operand modes, and
   register it with def_builtin.
   NOTE(review): this chunk is a lossy extraction -- the embedded
   original line numbers are non-contiguous, so braces, switch/case
   scaffolding, and some declarations (e.g. `i', `type', and the
   `tree v4si_ftype_int' / `tree v8hi_ftype_int' declarators) are not
   visible here; do not assume this text is compilable as-is.  */
7903 rs6000_common_init_builtins (void)
7905 struct builtin_description
*d
;
/* Function-type trees: one per distinct (return, args) mode combination
   used by the descriptor tables below.  V4SF/V4SI/V8HI/V16QI are the
   AltiVec 128-bit vector types; the opaque V2SI/V2SF types are for SPE.  */
7908 tree v4sf_ftype_v4sf_v4sf_v16qi
7909 = build_function_type_list (V4SF_type_node
,
7910 V4SF_type_node
, V4SF_type_node
,
7911 V16QI_type_node
, NULL_TREE
);
7912 tree v4si_ftype_v4si_v4si_v16qi
7913 = build_function_type_list (V4SI_type_node
,
7914 V4SI_type_node
, V4SI_type_node
,
7915 V16QI_type_node
, NULL_TREE
);
7916 tree v8hi_ftype_v8hi_v8hi_v16qi
7917 = build_function_type_list (V8HI_type_node
,
7918 V8HI_type_node
, V8HI_type_node
,
7919 V16QI_type_node
, NULL_TREE
);
7920 tree v16qi_ftype_v16qi_v16qi_v16qi
7921 = build_function_type_list (V16QI_type_node
,
7922 V16QI_type_node
, V16QI_type_node
,
7923 V16QI_type_node
, NULL_TREE
);
7925 = build_function_type_list (V4SI_type_node
, integer_type_node
, NULL_TREE
);
7927 = build_function_type_list (V8HI_type_node
, integer_type_node
, NULL_TREE
);
7928 tree v16qi_ftype_int
7929 = build_function_type_list (V16QI_type_node
, integer_type_node
, NULL_TREE
);
7930 tree v8hi_ftype_v16qi
7931 = build_function_type_list (V8HI_type_node
, V16QI_type_node
, NULL_TREE
);
7932 tree v4sf_ftype_v4sf
7933 = build_function_type_list (V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
7935 tree v2si_ftype_v2si_v2si
7936 = build_function_type_list (opaque_V2SI_type_node
,
7937 opaque_V2SI_type_node
,
7938 opaque_V2SI_type_node
, NULL_TREE
);
7940 tree v2sf_ftype_v2sf_v2sf
7941 = build_function_type_list (opaque_V2SF_type_node
,
7942 opaque_V2SF_type_node
,
7943 opaque_V2SF_type_node
, NULL_TREE
);
7945 tree v2si_ftype_int_int
7946 = build_function_type_list (opaque_V2SI_type_node
,
7947 integer_type_node
, integer_type_node
,
7950 tree v2si_ftype_v2si
7951 = build_function_type_list (opaque_V2SI_type_node
,
7952 opaque_V2SI_type_node
, NULL_TREE
);
7954 tree v2sf_ftype_v2sf
7955 = build_function_type_list (opaque_V2SF_type_node
,
7956 opaque_V2SF_type_node
, NULL_TREE
);
7958 tree v2sf_ftype_v2si
7959 = build_function_type_list (opaque_V2SF_type_node
,
7960 opaque_V2SI_type_node
, NULL_TREE
);
7962 tree v2si_ftype_v2sf
7963 = build_function_type_list (opaque_V2SI_type_node
,
7964 opaque_V2SF_type_node
, NULL_TREE
);
7966 tree v2si_ftype_v2si_char
7967 = build_function_type_list (opaque_V2SI_type_node
,
7968 opaque_V2SI_type_node
,
7969 char_type_node
, NULL_TREE
);
7971 tree v2si_ftype_int_char
7972 = build_function_type_list (opaque_V2SI_type_node
,
7973 integer_type_node
, char_type_node
, NULL_TREE
);
7975 tree v2si_ftype_char
7976 = build_function_type_list (opaque_V2SI_type_node
,
7977 char_type_node
, NULL_TREE
);
7979 tree int_ftype_int_int
7980 = build_function_type_list (integer_type_node
,
7981 integer_type_node
, integer_type_node
,
7984 tree v4si_ftype_v4si_v4si
7985 = build_function_type_list (V4SI_type_node
,
7986 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
7987 tree v4sf_ftype_v4si_int
7988 = build_function_type_list (V4SF_type_node
,
7989 V4SI_type_node
, integer_type_node
, NULL_TREE
);
7990 tree v4si_ftype_v4sf_int
7991 = build_function_type_list (V4SI_type_node
,
7992 V4SF_type_node
, integer_type_node
, NULL_TREE
);
7993 tree v4si_ftype_v4si_int
7994 = build_function_type_list (V4SI_type_node
,
7995 V4SI_type_node
, integer_type_node
, NULL_TREE
);
7996 tree v8hi_ftype_v8hi_int
7997 = build_function_type_list (V8HI_type_node
,
7998 V8HI_type_node
, integer_type_node
, NULL_TREE
);
7999 tree v16qi_ftype_v16qi_int
8000 = build_function_type_list (V16QI_type_node
,
8001 V16QI_type_node
, integer_type_node
, NULL_TREE
);
8002 tree v16qi_ftype_v16qi_v16qi_int
8003 = build_function_type_list (V16QI_type_node
,
8004 V16QI_type_node
, V16QI_type_node
,
8005 integer_type_node
, NULL_TREE
);
8006 tree v8hi_ftype_v8hi_v8hi_int
8007 = build_function_type_list (V8HI_type_node
,
8008 V8HI_type_node
, V8HI_type_node
,
8009 integer_type_node
, NULL_TREE
);
8010 tree v4si_ftype_v4si_v4si_int
8011 = build_function_type_list (V4SI_type_node
,
8012 V4SI_type_node
, V4SI_type_node
,
8013 integer_type_node
, NULL_TREE
);
8014 tree v4sf_ftype_v4sf_v4sf_int
8015 = build_function_type_list (V4SF_type_node
,
8016 V4SF_type_node
, V4SF_type_node
,
8017 integer_type_node
, NULL_TREE
);
8018 tree v4sf_ftype_v4sf_v4sf
8019 = build_function_type_list (V4SF_type_node
,
8020 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
8021 tree v4sf_ftype_v4sf_v4sf_v4si
8022 = build_function_type_list (V4SF_type_node
,
8023 V4SF_type_node
, V4SF_type_node
,
8024 V4SI_type_node
, NULL_TREE
);
8025 tree v4sf_ftype_v4sf_v4sf_v4sf
8026 = build_function_type_list (V4SF_type_node
,
8027 V4SF_type_node
, V4SF_type_node
,
8028 V4SF_type_node
, NULL_TREE
);
8029 tree v4si_ftype_v4si_v4si_v4si
8030 = build_function_type_list (V4SI_type_node
,
8031 V4SI_type_node
, V4SI_type_node
,
8032 V4SI_type_node
, NULL_TREE
);
8033 tree v8hi_ftype_v8hi_v8hi
8034 = build_function_type_list (V8HI_type_node
,
8035 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
8036 tree v8hi_ftype_v8hi_v8hi_v8hi
8037 = build_function_type_list (V8HI_type_node
,
8038 V8HI_type_node
, V8HI_type_node
,
8039 V8HI_type_node
, NULL_TREE
);
8040 tree v4si_ftype_v8hi_v8hi_v4si
8041 = build_function_type_list (V4SI_type_node
,
8042 V8HI_type_node
, V8HI_type_node
,
8043 V4SI_type_node
, NULL_TREE
);
8044 tree v4si_ftype_v16qi_v16qi_v4si
8045 = build_function_type_list (V4SI_type_node
,
8046 V16QI_type_node
, V16QI_type_node
,
8047 V4SI_type_node
, NULL_TREE
);
8048 tree v16qi_ftype_v16qi_v16qi
8049 = build_function_type_list (V16QI_type_node
,
8050 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
8051 tree v4si_ftype_v4sf_v4sf
8052 = build_function_type_list (V4SI_type_node
,
8053 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
8054 tree v8hi_ftype_v16qi_v16qi
8055 = build_function_type_list (V8HI_type_node
,
8056 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
8057 tree v4si_ftype_v8hi_v8hi
8058 = build_function_type_list (V4SI_type_node
,
8059 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
8060 tree v8hi_ftype_v4si_v4si
8061 = build_function_type_list (V8HI_type_node
,
8062 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
8063 tree v16qi_ftype_v8hi_v8hi
8064 = build_function_type_list (V16QI_type_node
,
8065 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
8066 tree v4si_ftype_v16qi_v4si
8067 = build_function_type_list (V4SI_type_node
,
8068 V16QI_type_node
, V4SI_type_node
, NULL_TREE
);
8069 tree v4si_ftype_v16qi_v16qi
8070 = build_function_type_list (V4SI_type_node
,
8071 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
8072 tree v4si_ftype_v8hi_v4si
8073 = build_function_type_list (V4SI_type_node
,
8074 V8HI_type_node
, V4SI_type_node
, NULL_TREE
);
8075 tree v4si_ftype_v8hi
8076 = build_function_type_list (V4SI_type_node
, V8HI_type_node
, NULL_TREE
);
8077 tree int_ftype_v4si_v4si
8078 = build_function_type_list (integer_type_node
,
8079 V4SI_type_node
, V4SI_type_node
, NULL_TREE
);
8080 tree int_ftype_v4sf_v4sf
8081 = build_function_type_list (integer_type_node
,
8082 V4SF_type_node
, V4SF_type_node
, NULL_TREE
);
8083 tree int_ftype_v16qi_v16qi
8084 = build_function_type_list (integer_type_node
,
8085 V16QI_type_node
, V16QI_type_node
, NULL_TREE
);
8086 tree int_ftype_v8hi_v8hi
8087 = build_function_type_list (integer_type_node
,
8088 V8HI_type_node
, V8HI_type_node
, NULL_TREE
);
/* For each descriptor table below, entries with a null name or a
   CODE_FOR_nothing icode are skipped; otherwise the operand modes of
   the insn pattern select one of the function types built above.
   NOTE(review): the mode-dispatch here was originally a switch; the
   case labels are not visible in this extraction.  */
8090 /* Add the simple ternary operators. */
8091 d
= (struct builtin_description
*) bdesc_3arg
;
8092 for (i
= 0; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
8095 enum machine_mode mode0
, mode1
, mode2
, mode3
;
8098 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
8101 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
8102 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
8103 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
8104 mode3
= insn_data
[d
->icode
].operand
[3].mode
;
8106 /* When all four are of the same mode. */
8107 if (mode0
== mode1
&& mode1
== mode2
&& mode2
== mode3
)
8112 type
= v4si_ftype_v4si_v4si_v4si
;
8115 type
= v4sf_ftype_v4sf_v4sf_v4sf
;
8118 type
= v8hi_ftype_v8hi_v8hi_v8hi
;
8121 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
8127 else if (mode0
== mode1
&& mode1
== mode2
&& mode3
== V16QImode
)
8132 type
= v4si_ftype_v4si_v4si_v16qi
;
8135 type
= v4sf_ftype_v4sf_v4sf_v16qi
;
8138 type
= v8hi_ftype_v8hi_v8hi_v16qi
;
8141 type
= v16qi_ftype_v16qi_v16qi_v16qi
;
8147 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
8148 && mode3
== V4SImode
)
8149 type
= v4si_ftype_v16qi_v16qi_v4si
;
8150 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
8151 && mode3
== V4SImode
)
8152 type
= v4si_ftype_v8hi_v8hi_v4si
;
8153 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
8154 && mode3
== V4SImode
)
8155 type
= v4sf_ftype_v4sf_v4sf_v4si
;
8157 /* vchar, vchar, vchar, 4 bit literal. */
8158 else if (mode0
== V16QImode
&& mode1
== mode0
&& mode2
== mode0
8160 type
= v16qi_ftype_v16qi_v16qi_int
;
8162 /* vshort, vshort, vshort, 4 bit literal. */
8163 else if (mode0
== V8HImode
&& mode1
== mode0
&& mode2
== mode0
8165 type
= v8hi_ftype_v8hi_v8hi_int
;
8167 /* vint, vint, vint, 4 bit literal. */
8168 else if (mode0
== V4SImode
&& mode1
== mode0
&& mode2
== mode0
8170 type
= v4si_ftype_v4si_v4si_int
;
8172 /* vfloat, vfloat, vfloat, 4 bit literal. */
8173 else if (mode0
== V4SFmode
&& mode1
== mode0
&& mode2
== mode0
8175 type
= v4sf_ftype_v4sf_v4sf_int
;
8180 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
8183 /* Add the simple binary operators. */
8184 d
= (struct builtin_description
*) bdesc_2arg
;
8185 for (i
= 0; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
8187 enum machine_mode mode0
, mode1
, mode2
;
8190 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
8193 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
8194 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
8195 mode2
= insn_data
[d
->icode
].operand
[2].mode
;
8197 /* When all three operands are of the same mode. */
8198 if (mode0
== mode1
&& mode1
== mode2
)
8203 type
= v4sf_ftype_v4sf_v4sf
;
8206 type
= v4si_ftype_v4si_v4si
;
8209 type
= v16qi_ftype_v16qi_v16qi
;
8212 type
= v8hi_ftype_v8hi_v8hi
;
8215 type
= v2si_ftype_v2si_v2si
;
8218 type
= v2sf_ftype_v2sf_v2sf
;
8221 type
= int_ftype_int_int
;
8228 /* A few other combos we really don't want to do manually. */
8230 /* vint, vfloat, vfloat. */
8231 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== V4SFmode
)
8232 type
= v4si_ftype_v4sf_v4sf
;
8234 /* vshort, vchar, vchar. */
8235 else if (mode0
== V8HImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
8236 type
= v8hi_ftype_v16qi_v16qi
;
8238 /* vint, vshort, vshort. */
8239 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
8240 type
= v4si_ftype_v8hi_v8hi
;
8242 /* vshort, vint, vint. */
8243 else if (mode0
== V8HImode
&& mode1
== V4SImode
&& mode2
== V4SImode
)
8244 type
= v8hi_ftype_v4si_v4si
;
8246 /* vchar, vshort, vshort. */
8247 else if (mode0
== V16QImode
&& mode1
== V8HImode
&& mode2
== V8HImode
)
8248 type
= v16qi_ftype_v8hi_v8hi
;
8250 /* vint, vchar, vint. */
8251 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V4SImode
)
8252 type
= v4si_ftype_v16qi_v4si
;
8254 /* vint, vchar, vchar. */
8255 else if (mode0
== V4SImode
&& mode1
== V16QImode
&& mode2
== V16QImode
)
8256 type
= v4si_ftype_v16qi_v16qi
;
8258 /* vint, vshort, vint. */
8259 else if (mode0
== V4SImode
&& mode1
== V8HImode
&& mode2
== V4SImode
)
8260 type
= v4si_ftype_v8hi_v4si
;
8262 /* vint, vint, 5 bit literal. */
8263 else if (mode0
== V4SImode
&& mode1
== V4SImode
&& mode2
== QImode
)
8264 type
= v4si_ftype_v4si_int
;
8266 /* vshort, vshort, 5 bit literal. */
8267 else if (mode0
== V8HImode
&& mode1
== V8HImode
&& mode2
== QImode
)
8268 type
= v8hi_ftype_v8hi_int
;
8270 /* vchar, vchar, 5 bit literal. */
8271 else if (mode0
== V16QImode
&& mode1
== V16QImode
&& mode2
== QImode
)
8272 type
= v16qi_ftype_v16qi_int
;
8274 /* vfloat, vint, 5 bit literal. */
8275 else if (mode0
== V4SFmode
&& mode1
== V4SImode
&& mode2
== QImode
)
8276 type
= v4sf_ftype_v4si_int
;
8278 /* vint, vfloat, 5 bit literal. */
8279 else if (mode0
== V4SImode
&& mode1
== V4SFmode
&& mode2
== QImode
)
8280 type
= v4si_ftype_v4sf_int
;
8282 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== SImode
)
8283 type
= v2si_ftype_int_int
;
8285 else if (mode0
== V2SImode
&& mode1
== V2SImode
&& mode2
== QImode
)
8286 type
= v2si_ftype_v2si_char
;
8288 else if (mode0
== V2SImode
&& mode1
== SImode
&& mode2
== QImode
)
8289 type
= v2si_ftype_int_char
;
8292 else if (mode0
== SImode
)
8297 type
= int_ftype_v4si_v4si
;
8300 type
= int_ftype_v4sf_v4sf
;
8303 type
= int_ftype_v16qi_v16qi
;
8306 type
= int_ftype_v8hi_v8hi
;
8316 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
8319 /* Add the simple unary operators. */
8320 d
= (struct builtin_description
*) bdesc_1arg
;
8321 for (i
= 0; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
8323 enum machine_mode mode0
, mode1
;
8326 if (d
->name
== 0 || d
->icode
== CODE_FOR_nothing
)
8329 mode0
= insn_data
[d
->icode
].operand
[0].mode
;
8330 mode1
= insn_data
[d
->icode
].operand
[1].mode
;
8332 if (mode0
== V4SImode
&& mode1
== QImode
)
8333 type
= v4si_ftype_int
;
8334 else if (mode0
== V8HImode
&& mode1
== QImode
)
8335 type
= v8hi_ftype_int
;
8336 else if (mode0
== V16QImode
&& mode1
== QImode
)
8337 type
= v16qi_ftype_int
;
8338 else if (mode0
== V4SFmode
&& mode1
== V4SFmode
)
8339 type
= v4sf_ftype_v4sf
;
8340 else if (mode0
== V8HImode
&& mode1
== V16QImode
)
8341 type
= v8hi_ftype_v16qi
;
8342 else if (mode0
== V4SImode
&& mode1
== V8HImode
)
8343 type
= v4si_ftype_v8hi
;
8344 else if (mode0
== V2SImode
&& mode1
== V2SImode
)
8345 type
= v2si_ftype_v2si
;
8346 else if (mode0
== V2SFmode
&& mode1
== V2SFmode
)
8347 type
= v2sf_ftype_v2sf
;
8348 else if (mode0
== V2SFmode
&& mode1
== V2SImode
)
8349 type
= v2sf_ftype_v2si
;
8350 else if (mode0
== V2SImode
&& mode1
== V2SFmode
)
8351 type
= v2si_ftype_v2sf
;
8352 else if (mode0
== V2SImode
&& mode1
== QImode
)
8353 type
= v2si_ftype_char
;
8357 def_builtin (d
->mask
, d
->name
, type
, d
->code
);
/* Register target-specific library routine names in the optabs for
   software floating point.  Visible logic: do nothing without hard
   float; for non-V4 ABIs register the AIX float->int conversion
   helpers (__itrunc etc., only when !TARGET_POWER2 && !TARGET_POWERPC
   on XCOFF) and the _xlq* TFmode quad-float arithmetic; otherwise
   register the 32-bit SVR4 _q_* quad-float arithmetic, comparison and
   conversion routines (sqrt only when TARGET_PPC_GPOPT/TARGET_POWER2).
   NOTE(review): extraction gaps -- braces/else lines are missing here,
   so the exact branch nesting must be confirmed against the full file.  */
8362 rs6000_init_libfuncs (void)
8364 if (!TARGET_HARD_FLOAT
)
8367 if (DEFAULT_ABI
!= ABI_V4
)
8369 if (TARGET_XCOFF
&& ! TARGET_POWER2
&& ! TARGET_POWERPC
)
8371 /* AIX library routines for float->int conversion. */
8372 set_conv_libfunc (sfix_optab
, SImode
, DFmode
, "__itrunc");
8373 set_conv_libfunc (ufix_optab
, SImode
, DFmode
, "__uitrunc");
8374 set_conv_libfunc (sfix_optab
, SImode
, TFmode
, "_qitrunc");
8375 set_conv_libfunc (ufix_optab
, SImode
, TFmode
, "_quitrunc");
8378 /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines. */
8379 set_optab_libfunc (add_optab
, TFmode
, "_xlqadd");
8380 set_optab_libfunc (sub_optab
, TFmode
, "_xlqsub");
8381 set_optab_libfunc (smul_optab
, TFmode
, "_xlqmul");
8382 set_optab_libfunc (sdiv_optab
, TFmode
, "_xlqdiv");
8386 /* 32-bit SVR4 quad floating point routines. */
8388 set_optab_libfunc (add_optab
, TFmode
, "_q_add");
8389 set_optab_libfunc (sub_optab
, TFmode
, "_q_sub");
8390 set_optab_libfunc (neg_optab
, TFmode
, "_q_neg");
8391 set_optab_libfunc (smul_optab
, TFmode
, "_q_mul");
8392 set_optab_libfunc (sdiv_optab
, TFmode
, "_q_div");
8393 if (TARGET_PPC_GPOPT
|| TARGET_POWER2
)
8394 set_optab_libfunc (sqrt_optab
, TFmode
, "_q_sqrt");
8396 set_optab_libfunc (eq_optab
, TFmode
, "_q_feq");
8397 set_optab_libfunc (ne_optab
, TFmode
, "_q_fne");
8398 set_optab_libfunc (gt_optab
, TFmode
, "_q_fgt");
8399 set_optab_libfunc (ge_optab
, TFmode
, "_q_fge");
8400 set_optab_libfunc (lt_optab
, TFmode
, "_q_flt");
8401 set_optab_libfunc (le_optab
, TFmode
, "_q_fle");
8403 set_conv_libfunc (sext_optab
, TFmode
, SFmode
, "_q_stoq");
8404 set_conv_libfunc (sext_optab
, TFmode
, DFmode
, "_q_dtoq");
8405 set_conv_libfunc (trunc_optab
, SFmode
, TFmode
, "_q_qtos");
8406 set_conv_libfunc (trunc_optab
, DFmode
, TFmode
, "_q_qtod");
8407 set_conv_libfunc (sfix_optab
, SImode
, TFmode
, "_q_qtoi");
8408 set_conv_libfunc (ufix_optab
, SImode
, TFmode
, "_q_qtou");
8409 set_conv_libfunc (sfloat_optab
, TFmode
, SImode
, "_q_itoq");
/* NOTE(review): lossy extraction -- the lines choosing `clear_step',
   the memset fallback call, the per-iteration `clear_bytes'/mode
   selection bodies, and the closing braces/return are not visible
   (original line numbers jump).  The visible skeleton: validate the
   constant length/alignment, cap the expansion size (smaller cap when
   optimize_size), then emit zero stores of decreasing width (16-byte
   AltiVec when align >= 128, 8-byte on 64-bit targets, then 4/2/1).  */
8414 /* Expand a block clear operation, and return 1 if successful. Return 0
8415 if we should let the compiler generate normal code.
8417 operands[0] is the destination
8418 operands[1] is the length
8419 operands[2] is the alignment */
8422 expand_block_clear (rtx operands
[])
8424 rtx orig_dest
= operands
[0];
8425 rtx bytes_rtx
= operands
[1];
8426 rtx align_rtx
= operands
[2];
8427 bool constp
= (GET_CODE (bytes_rtx
) == CONST_INT
);
8428 HOST_WIDE_INT align
;
8429 HOST_WIDE_INT bytes
;
8434 /* If this is not a fixed size move, just call memcpy */
8438 /* If this is not a fixed size alignment, abort */
8439 if (GET_CODE (align_rtx
) != CONST_INT
)
8441 align
= INTVAL (align_rtx
) * BITS_PER_UNIT
;
8443 /* Anything to clear? */
8444 bytes
= INTVAL (bytes_rtx
);
8448 /* Use the builtin memset after a point, to avoid huge code bloat.
8449 When optimize_size, avoid any significant code bloat; calling
8450 memset is about 4 instructions, so allow for one instruction to
8451 load zero and three to do clearing. */
8452 if (TARGET_ALTIVEC
&& align
>= 128)
8454 else if (TARGET_POWERPC64
&& align
>= 32)
8459 if (optimize_size
&& bytes
> 3 * clear_step
)
8461 if (! optimize_size
&& bytes
> 8 * clear_step
)
8464 for (offset
= 0; bytes
> 0; offset
+= clear_bytes
, bytes
-= clear_bytes
)
8466 enum machine_mode mode
= BLKmode
;
8469 if (bytes
>= 16 && TARGET_ALTIVEC
&& align
>= 128)
8474 else if (bytes
>= 8 && TARGET_POWERPC64
8475 /* 64-bit loads and stores require word-aligned
8477 && (align
>= 64 || (!STRICT_ALIGNMENT
&& align
>= 32)))
8482 else if (bytes
>= 4 && (align
>= 32 || !STRICT_ALIGNMENT
))
8483 { /* move 4 bytes */
8487 else if (bytes
== 2 && (align
>= 16 || !STRICT_ALIGNMENT
))
8488 { /* move 2 bytes */
8492 else /* move 1 byte at a time */
8498 dest
= adjust_address (orig_dest
, mode
, offset
);
/* Store a zero of the chosen mode at the current offset.  */
8500 emit_move_insn (dest
, CONST0_RTX (mode
));
/* NOTE(review): lossy extraction -- mode/move_bytes assignments inside
   several branches, the early-out returns, register-pressure tests on
   the TARGET_STRING branches, and the final movmemsi alignment operand
   plus closing braces are not visible here (original line numbers
   jump).  Visible skeleton: validate constant length/alignment, cap
   the inline expansion at 32 (64 on PowerPC64) bytes, then per
   iteration pick the widest available move: AltiVec 16-byte, string
   insns moving 32/24/16 bytes, 8-byte DImode on 64-bit, then 4/2/1
   bytes; register moves are buffered in `stores' (up to MAX_MOVE_REG)
   so loads are emitted before the corresponding stores.  */
8507 /* Expand a block move operation, and return 1 if successful. Return 0
8508 if we should let the compiler generate normal code.
8510 operands[0] is the destination
8511 operands[1] is the source
8512 operands[2] is the length
8513 operands[3] is the alignment */
8515 #define MAX_MOVE_REG 4
8518 expand_block_move (rtx operands
[])
8520 rtx orig_dest
= operands
[0];
8521 rtx orig_src
= operands
[1];
8522 rtx bytes_rtx
= operands
[2];
8523 rtx align_rtx
= operands
[3];
8524 int constp
= (GET_CODE (bytes_rtx
) == CONST_INT
);
8529 rtx stores
[MAX_MOVE_REG
];
8532 /* If this is not a fixed size move, just call memcpy */
8536 /* If this is not a fixed size alignment, abort */
8537 if (GET_CODE (align_rtx
) != CONST_INT
)
8539 align
= INTVAL (align_rtx
) * BITS_PER_UNIT
;
8541 /* Anything to move? */
8542 bytes
= INTVAL (bytes_rtx
);
8546 /* store_one_arg depends on expand_block_move to handle at least the size of
8547 reg_parm_stack_space. */
8548 if (bytes
> (TARGET_POWERPC64
? 64 : 32))
8551 for (offset
= 0; bytes
> 0; offset
+= move_bytes
, bytes
-= move_bytes
)
8554 rtx (*movmemsi
) (rtx
, rtx
, rtx
, rtx
);
8555 rtx (*mov
) (rtx
, rtx
);
8557 enum machine_mode mode
= BLKmode
;
8560 /* Altivec first, since it will be faster than a string move
8561 when it applies, and usually not significantly larger. */
8562 if (TARGET_ALTIVEC
&& bytes
>= 16 && align
>= 128)
8566 gen_func
.mov
= gen_movv4si
;
8568 else if (TARGET_STRING
8569 && bytes
> 24 /* move up to 32 bytes at a time */
8577 && ! fixed_regs
[12])
8579 move_bytes
= (bytes
> 32) ? 32 : bytes
;
8580 gen_func
.movmemsi
= gen_movmemsi_8reg
;
8582 else if (TARGET_STRING
8583 && bytes
> 16 /* move up to 24 bytes at a time */
8589 && ! fixed_regs
[10])
8591 move_bytes
= (bytes
> 24) ? 24 : bytes
;
8592 gen_func
.movmemsi
= gen_movmemsi_6reg
;
8594 else if (TARGET_STRING
8595 && bytes
> 8 /* move up to 16 bytes at a time */
8601 move_bytes
= (bytes
> 16) ? 16 : bytes
;
8602 gen_func
.movmemsi
= gen_movmemsi_4reg
;
8604 else if (bytes
>= 8 && TARGET_POWERPC64
8605 /* 64-bit loads and stores require word-aligned
8607 && (align
>= 64 || (!STRICT_ALIGNMENT
&& align
>= 32)))
8611 gen_func
.mov
= gen_movdi
;
8613 else if (TARGET_STRING
&& bytes
> 4 && !TARGET_POWERPC64
)
8614 { /* move up to 8 bytes at a time */
8615 move_bytes
= (bytes
> 8) ? 8 : bytes
;
8616 gen_func
.movmemsi
= gen_movmemsi_2reg
;
8618 else if (bytes
>= 4 && (align
>= 32 || !STRICT_ALIGNMENT
))
8619 { /* move 4 bytes */
8622 gen_func
.mov
= gen_movsi
;
8624 else if (bytes
== 2 && (align
>= 16 || !STRICT_ALIGNMENT
))
8625 { /* move 2 bytes */
8628 gen_func
.mov
= gen_movhi
;
8630 else if (TARGET_STRING
&& bytes
> 1)
8631 { /* move up to 4 bytes at a time */
8632 move_bytes
= (bytes
> 4) ? 4 : bytes
;
8633 gen_func
.movmemsi
= gen_movmemsi_1reg
;
8635 else /* move 1 byte at a time */
8639 gen_func
.mov
= gen_movqi
;
8642 src
= adjust_address (orig_src
, mode
, offset
);
8643 dest
= adjust_address (orig_dest
, mode
, offset
);
/* Fixed-width case: load into a fresh pseudo now, queue the store.  */
8645 if (mode
!= BLKmode
)
8647 rtx tmp_reg
= gen_reg_rtx (mode
);
8649 emit_insn ((*gen_func
.mov
) (tmp_reg
, src
));
8650 stores
[num_reg
++] = (*gen_func
.mov
) (dest
, tmp_reg
);
/* Flush queued stores when the buffer is full, at a BLKmode (string
   insn) boundary, or on the final chunk.  */
8653 if (mode
== BLKmode
|| num_reg
>= MAX_MOVE_REG
|| bytes
== move_bytes
)
8656 for (i
= 0; i
< num_reg
; i
++)
8657 emit_insn (stores
[i
]);
8661 if (mode
== BLKmode
)
8663 /* Move the address into scratch registers. The movmemsi
8664 patterns require zero offset. */
8665 if (!REG_P (XEXP (src
, 0)))
8667 rtx src_reg
= copy_addr_to_reg (XEXP (src
, 0));
8668 src
= replace_equiv_address (src
, src_reg
);
8670 set_mem_size (src
, GEN_INT (move_bytes
));
8672 if (!REG_P (XEXP (dest
, 0)))
8674 rtx dest_reg
= copy_addr_to_reg (XEXP (dest
, 0));
8675 dest
= replace_equiv_address (dest
, dest_reg
);
8677 set_mem_size (dest
, GEN_INT (move_bytes
));
8679 emit_insn ((*gen_func
.movmemsi
) (dest
, src
,
8680 GEN_INT (move_bytes
& 31),
/* NOTE(review): lossy extraction -- local declarations (elt, i, index),
   the count check, the `return 0;' bodies of each failed match, and
   the final `return 1;' are not visible here.  Visible skeleton:
   verify the PARALLEL's fixed layout -- a CLOBBER and a USE, 18 DFmode
   FP-register stores to memory, 12 V4SImode AltiVec-register stores,
   19 Pmode GP-register stores, one store of CR2, then two USEs and a
   CLOBBER.  */
8689 /* Return 1 if OP is suitable for a save_world call in prologue. It is
8690 known to be a PARALLEL. */
8692 save_world_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
8697 int count
= XVECLEN (op
, 0);
8703 if (GET_CODE (XVECEXP (op
, 0, index
++)) != CLOBBER
8704 || GET_CODE (XVECEXP (op
, 0, index
++)) != USE
)
/* 18 floating-point register saves.  */
8707 for (i
=1; i
<= 18; i
++)
8709 elt
= XVECEXP (op
, 0, index
++);
8710 if (GET_CODE (elt
) != SET
8711 || GET_CODE (SET_DEST (elt
)) != MEM
8712 || ! memory_operand (SET_DEST (elt
), DFmode
)
8713 || GET_CODE (SET_SRC (elt
)) != REG
8714 || GET_MODE (SET_SRC (elt
)) != DFmode
)
/* 12 AltiVec register saves.  */
8718 for (i
=1; i
<= 12; i
++)
8720 elt
= XVECEXP (op
, 0, index
++);
8721 if (GET_CODE (elt
) != SET
8722 || GET_CODE (SET_DEST (elt
)) != MEM
8723 || GET_CODE (SET_SRC (elt
)) != REG
8724 || GET_MODE (SET_SRC (elt
)) != V4SImode
)
/* 19 general-purpose register saves.  */
8728 for (i
=1; i
<= 19; i
++)
8730 elt
= XVECEXP (op
, 0, index
++);
8731 if (GET_CODE (elt
) != SET
8732 || GET_CODE (SET_DEST (elt
)) != MEM
8733 || ! memory_operand (SET_DEST (elt
), Pmode
)
8734 || GET_CODE (SET_SRC (elt
)) != REG
8735 || GET_MODE (SET_SRC (elt
)) != Pmode
)
/* Save of CR2.  */
8739 elt
= XVECEXP (op
, 0, index
++);
8740 if (GET_CODE (elt
) != SET
8741 || GET_CODE (SET_DEST (elt
)) != MEM
8742 || ! memory_operand (SET_DEST (elt
), Pmode
)
8743 || GET_CODE (SET_SRC (elt
)) != REG
8744 || REGNO (SET_SRC (elt
)) != CR2_REGNO
8745 || GET_MODE (SET_SRC (elt
)) != Pmode
)
8748 if (GET_CODE (XVECEXP (op
, 0, index
++)) != USE
8749 || GET_CODE (XVECEXP (op
, 0, index
++)) != USE
8750 || GET_CODE (XVECEXP (op
, 0, index
++)) != CLOBBER
)
/* NOTE(review): lossy extraction -- local declarations, the count
   check, the per-match `return 0;' bodies and the final `return 1;'
   are not visible here.  Visible skeleton: verify the PARALLEL's
   layout in the reverse order of save_world_operation -- RETURN, two
   USEs and a CLOBBER, a load of CR2, 19 Pmode GP-register loads, 12
   V4SImode AltiVec loads, 18 DFmode FP loads, then four CLOBBERs and
   a USE.  */
8755 /* Return 1 if OP is suitable for a restore_world call in epilogue. It is
8756 known to be a PARALLEL. */
8758 restore_world_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
8763 int count
= XVECLEN (op
, 0);
8769 if (GET_CODE (XVECEXP (op
, 0, index
++)) != RETURN
8770 || GET_CODE (XVECEXP (op
, 0, index
++)) != USE
8771 || GET_CODE (XVECEXP (op
, 0, index
++)) != USE
8772 || GET_CODE (XVECEXP (op
, 0, index
++)) != CLOBBER
)
/* Restore of CR2.  */
8775 elt
= XVECEXP (op
, 0, index
++);
8776 if (GET_CODE (elt
) != SET
8777 || GET_CODE (SET_SRC (elt
)) != MEM
8778 || ! memory_operand (SET_SRC (elt
), Pmode
)
8779 || GET_CODE (SET_DEST (elt
)) != REG
8780 || REGNO (SET_DEST (elt
)) != CR2_REGNO
8781 || GET_MODE (SET_DEST (elt
)) != Pmode
)
/* 19 general-purpose register restores.  */
8784 for (i
=1; i
<= 19; i
++)
8786 elt
= XVECEXP (op
, 0, index
++);
8787 if (GET_CODE (elt
) != SET
8788 || GET_CODE (SET_SRC (elt
)) != MEM
8789 || ! memory_operand (SET_SRC (elt
), Pmode
)
8790 || GET_CODE (SET_DEST (elt
)) != REG
8791 || GET_MODE (SET_DEST (elt
)) != Pmode
)
/* 12 AltiVec register restores.  */
8795 for (i
=1; i
<= 12; i
++)
8797 elt
= XVECEXP (op
, 0, index
++);
8798 if (GET_CODE (elt
) != SET
8799 || GET_CODE (SET_SRC (elt
)) != MEM
8800 || GET_CODE (SET_DEST (elt
)) != REG
8801 || GET_MODE (SET_DEST (elt
)) != V4SImode
)
/* 18 floating-point register restores.  */
8805 for (i
=1; i
<= 18; i
++)
8807 elt
= XVECEXP (op
, 0, index
++);
8808 if (GET_CODE (elt
) != SET
8809 || GET_CODE (SET_SRC (elt
)) != MEM
8810 || ! memory_operand (SET_SRC (elt
), DFmode
)
8811 || GET_CODE (SET_DEST (elt
)) != REG
8812 || GET_MODE (SET_DEST (elt
)) != DFmode
)
8816 if (GET_CODE (XVECEXP (op
, 0, index
++)) != CLOBBER
8817 || GET_CODE (XVECEXP (op
, 0, index
++)) != CLOBBER
8818 || GET_CODE (XVECEXP (op
, 0, index
++)) != CLOBBER
8819 || GET_CODE (XVECEXP (op
, 0, index
++)) != CLOBBER
8820 || GET_CODE (XVECEXP (op
, 0, index
++)) != USE
)
/* NOTE(review): lossy extraction -- the count guard, `src_addr'/`i'
   declarations, `return 0;' / `return 1;' and braces are not visible.
   Visible skeleton: element 0 must be REG <- MEM; each later element i
   must load SImode register dest_regno+i from src_addr + 4*i.  */
8826 /* Return 1 if OP is a load multiple operation. It is known to be a
8827 PARALLEL and the first section will be tested. */
8830 load_multiple_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
8832 int count
= XVECLEN (op
, 0);
8833 unsigned int dest_regno
;
8837 /* Perform a quick check so we don't blow up below. */
8839 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
8840 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
8841 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
8844 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
8845 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
8847 for (i
= 1; i
< count
; i
++)
8849 rtx elt
= XVECEXP (op
, 0, i
);
8851 if (GET_CODE (elt
) != SET
8852 || GET_CODE (SET_DEST (elt
)) != REG
8853 || GET_MODE (SET_DEST (elt
)) != SImode
8854 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
8855 || GET_CODE (SET_SRC (elt
)) != MEM
8856 || GET_MODE (SET_SRC (elt
)) != SImode
8857 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
8858 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
8859 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
8860 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != i
* 4)
/* NOTE(review): lossy extraction -- the count guard, `dest_addr'/`i'
   declarations, returns and braces are not visible.  Visible skeleton:
   mirror of load_multiple_operation with MEM <- REG; element i+1 (the
   CLOBBER at index 1 is skipped) must store SImode register
   src_regno+i to dest_addr + 4*i.  */
8867 /* Similar, but tests for store multiple. Here, the second vector element
8868 is a CLOBBER. It will be tested later. */
8871 store_multiple_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
8873 int count
= XVECLEN (op
, 0) - 1;
8874 unsigned int src_regno
;
8878 /* Perform a quick check so we don't blow up below. */
8880 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
8881 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
8882 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
8885 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
8886 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
8888 for (i
= 1; i
< count
; i
++)
8890 rtx elt
= XVECEXP (op
, 0, i
+ 1);
8892 if (GET_CODE (elt
) != SET
8893 || GET_CODE (SET_SRC (elt
)) != REG
8894 || GET_MODE (SET_SRC (elt
)) != SImode
8895 || REGNO (SET_SRC (elt
)) != src_regno
+ i
8896 || GET_CODE (SET_DEST (elt
)) != MEM
8897 || GET_MODE (SET_DEST (elt
)) != SImode
8898 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
8899 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
8900 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
8901 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != i
* 4)
/* NOTE(review): lossy extraction -- declarations of `i', `j', `xop',
   loop braces, some `return ""' statements and conditions selecting
   among the three overlap strategies are not visible.  Visible
   skeleton: single word loads with lwz; when the address register
   overlaps the destination range, either load it last, bump past it,
   or fall back to one lwz per word; otherwise emit a single
   lsi/lswi.  The "{power|powerpc}" brace syntax picks the mnemonic
   per instruction set.  */
8908 /* Return a string to perform a load_multiple operation.
8909 operands[0] is the vector.
8910 operands[1] is the source address.
8911 operands[2] is the first destination register. */
8914 rs6000_output_load_multiple (rtx operands
[3])
8916 /* We have to handle the case where the pseudo used to contain the address
8917 is assigned to one of the output registers. */
8919 int words
= XVECLEN (operands
[0], 0);
8922 if (XVECLEN (operands
[0], 0) == 1)
8923 return "{l|lwz} %2,0(%1)";
8925 for (i
= 0; i
< words
; i
++)
8926 if (refers_to_regno_p (REGNO (operands
[2]) + i
,
8927 REGNO (operands
[2]) + i
+ 1, operands
[1], 0))
/* Address register collides with destination register i: load the
   other words with lsi/lswi, then load the colliding word last.  */
8931 xop
[0] = GEN_INT (4 * (words
-1));
8932 xop
[1] = operands
[1];
8933 xop
[2] = operands
[2];
8934 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop
);
8939 xop
[0] = GEN_INT (4 * (words
-1));
8940 xop
[1] = operands
[1];
8941 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + 1);
8942 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop
);
/* General overlap: load each word individually, colliding word last.  */
8947 for (j
= 0; j
< words
; j
++)
8950 xop
[0] = GEN_INT (j
* 4);
8951 xop
[1] = operands
[1];
8952 xop
[2] = gen_rtx_REG (SImode
, REGNO (operands
[2]) + j
);
8953 output_asm_insn ("{l|lwz} %2,%0(%1)", xop
);
8955 xop
[0] = GEN_INT (i
* 4);
8956 xop
[1] = operands
[1];
8957 output_asm_insn ("{l|lwz} %1,%0(%1)", xop
);
/* No overlap: a single load-string instruction does it all.  */
8962 return "{lsi|lswi} %2,%1,%N0";
/* NOTE(review): lossy extraction -- count guard, returns and braces are
   not visible.  Visible skeleton: element 0 must be a SET of a REG from
   an UNSPEC_VOLATILE, one side of which is the VRSAVE register; every
   remaining element must be a CLOBBER or a SET.  */
8965 /* Return 1 for a parallel vrsave operation. */
8968 vrsave_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
8970 int count
= XVECLEN (op
, 0);
8971 unsigned int dest_regno
, src_regno
;
8975 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
8976 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
8977 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC_VOLATILE
)
8980 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
8981 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
/* Either the source or the destination must be VRSAVE.  */
8983 if (dest_regno
!= VRSAVE_REGNO
8984 && src_regno
!= VRSAVE_REGNO
)
8987 for (i
= 1; i
< count
; i
++)
8989 rtx elt
= XVECEXP (op
, 0, i
);
8991 if (GET_CODE (elt
) != CLOBBER
8992 && GET_CODE (elt
) != SET
)
/* NOTE(review): lossy extraction -- count guard, declarations of
   `unspec'/`maskval'/`src_reg', returns and braces are not visible.
   Visible skeleton: every element must be
   (set (reg:SI gpr) (unspec [(reg:CC crN) (const_int mask)]
                              UNSPEC_MOVESI_FROM_CR))
   where mask is the single bit selecting CR field N.  */
8999 /* Return 1 for a PARALLEL suitable for mfcr. */
9002 mfcr_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
9004 int count
= XVECLEN (op
, 0);
9007 /* Perform a quick check so we don't blow up below. */
9009 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
9010 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC
9011 || XVECLEN (SET_SRC (XVECEXP (op
, 0, 0)), 0) != 2)
9014 for (i
= 0; i
< count
; i
++)
9016 rtx exp
= XVECEXP (op
, 0, i
);
9021 src_reg
= XVECEXP (SET_SRC (exp
), 0, 0);
/* The unspec's first operand must be a CC-mode condition register.  */
9023 if (GET_CODE (src_reg
) != REG
9024 || GET_MODE (src_reg
) != CCmode
9025 || ! CR_REGNO_P (REGNO (src_reg
)))
9028 if (GET_CODE (exp
) != SET
9029 || GET_CODE (SET_DEST (exp
)) != REG
9030 || GET_MODE (SET_DEST (exp
)) != SImode
9031 || ! INT_REGNO_P (REGNO (SET_DEST (exp
))))
9033 unspec
= SET_SRC (exp
);
9034 maskval
= 1 << (MAX_CR_REGNO
- REGNO (src_reg
));
9036 if (GET_CODE (unspec
) != UNSPEC
9037 || XINT (unspec
, 1) != UNSPEC_MOVESI_FROM_CR
9038 || XVECLEN (unspec
, 0) != 2
9039 || XVECEXP (unspec
, 0, 0) != src_reg
9040 || GET_CODE (XVECEXP (unspec
, 0, 1)) != CONST_INT
9041 || INTVAL (XVECEXP (unspec
, 0, 1)) != maskval
)
/* NOTE(review): lossy extraction -- count guard, declarations of
   `unspec'/`maskval'/`src_reg', returns and braces are not visible.
   Visible skeleton: mirror of mfcr_operation; every element must be
   (set (reg:CC crN) (unspec [(reg:SI gpr) (const_int mask)]
                              UNSPEC_MOVESI_TO_CR))
   with the same SImode source GPR throughout and mask the single bit
   selecting CR field N.  */
9047 /* Return 1 for a PARALLEL suitable for mtcrf. */
9050 mtcrf_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
9052 int count
= XVECLEN (op
, 0);
9056 /* Perform a quick check so we don't blow up below. */
9058 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
9059 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC
9060 || XVECLEN (SET_SRC (XVECEXP (op
, 0, 0)), 0) != 2)
9062 src_reg
= XVECEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0, 0);
9064 if (GET_CODE (src_reg
) != REG
9065 || GET_MODE (src_reg
) != SImode
9066 || ! INT_REGNO_P (REGNO (src_reg
)))
9069 for (i
= 0; i
< count
; i
++)
9071 rtx exp
= XVECEXP (op
, 0, i
);
9075 if (GET_CODE (exp
) != SET
9076 || GET_CODE (SET_DEST (exp
)) != REG
9077 || GET_MODE (SET_DEST (exp
)) != CCmode
9078 || ! CR_REGNO_P (REGNO (SET_DEST (exp
))))
9080 unspec
= SET_SRC (exp
);
9081 maskval
= 1 << (MAX_CR_REGNO
- REGNO (SET_DEST (exp
)));
9083 if (GET_CODE (unspec
) != UNSPEC
9084 || XINT (unspec
, 1) != UNSPEC_MOVESI_TO_CR
9085 || XVECLEN (unspec
, 0) != 2
9086 || XVECEXP (unspec
, 0, 0) != src_reg
9087 || GET_CODE (XVECEXP (unspec
, 0, 1)) != CONST_INT
9088 || INTVAL (XVECEXP (unspec
, 0, 1)) != maskval
)
/* NOTE(review): lossy extraction -- `src_addr'/`i' declarations, the
   count/regno guards' bodies, the offset initialization in the
   indirect-address branch, the catch-all `return 0;' and final
   `return 1;' are not visible.  Visible skeleton: element 0 loads
   register dest_regno from a base address; count must equal
   32 - dest_regno (lmw loads through r31); every element i must load
   SImode register dest_regno+i from base + 4*i, the base register
   staying the same (and not r0).  */
9094 /* Return 1 for a PARALLEL suitable for lmw. */
9097 lmw_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
9099 int count
= XVECLEN (op
, 0);
9100 unsigned int dest_regno
;
9102 unsigned int base_regno
;
9103 HOST_WIDE_INT offset
;
9106 /* Perform a quick check so we don't blow up below. */
9108 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
9109 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != REG
9110 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != MEM
)
9113 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, 0)));
9114 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0);
9117 || count
!= 32 - (int) dest_regno
)
9120 if (legitimate_indirect_address_p (src_addr
, 0))
9123 base_regno
= REGNO (src_addr
);
9124 if (base_regno
== 0)
9127 else if (rs6000_legitimate_offset_address_p (SImode
, src_addr
, 0))
9129 offset
= INTVAL (XEXP (src_addr
, 1));
9130 base_regno
= REGNO (XEXP (src_addr
, 0));
9135 for (i
= 0; i
< count
; i
++)
9137 rtx elt
= XVECEXP (op
, 0, i
);
9140 HOST_WIDE_INT newoffset
;
9142 if (GET_CODE (elt
) != SET
9143 || GET_CODE (SET_DEST (elt
)) != REG
9144 || GET_MODE (SET_DEST (elt
)) != SImode
9145 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
9146 || GET_CODE (SET_SRC (elt
)) != MEM
9147 || GET_MODE (SET_SRC (elt
)) != SImode
)
9149 newaddr
= XEXP (SET_SRC (elt
), 0);
9150 if (legitimate_indirect_address_p (newaddr
, 0))
9155 else if (rs6000_legitimate_offset_address_p (SImode
, newaddr
, 0))
9157 addr_reg
= XEXP (newaddr
, 0);
9158 newoffset
= INTVAL (XEXP (newaddr
, 1));
/* Every load must use the same base register and consecutive words.  */
9162 if (REGNO (addr_reg
) != base_regno
9163 || newoffset
!= offset
+ 4 * i
)
9170 /* Return 1 for an PARALLEL suitable for stmw. */
9173 stmw_operation (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
9175 int count
= XVECLEN (op
, 0);
9176 unsigned int src_regno
;
9178 unsigned int base_regno
;
9179 HOST_WIDE_INT offset
;
9182 /* Perform a quick check so we don't blow up below. */
9184 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
9185 || GET_CODE (SET_DEST (XVECEXP (op
, 0, 0))) != MEM
9186 || GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != REG
)
9189 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, 0)));
9190 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, 0)), 0);
9193 || count
!= 32 - (int) src_regno
)
9196 if (legitimate_indirect_address_p (dest_addr
, 0))
9199 base_regno
= REGNO (dest_addr
);
9200 if (base_regno
== 0)
9203 else if (rs6000_legitimate_offset_address_p (SImode
, dest_addr
, 0))
9205 offset
= INTVAL (XEXP (dest_addr
, 1));
9206 base_regno
= REGNO (XEXP (dest_addr
, 0));
9211 for (i
= 0; i
< count
; i
++)
9213 rtx elt
= XVECEXP (op
, 0, i
);
9216 HOST_WIDE_INT newoffset
;
9218 if (GET_CODE (elt
) != SET
9219 || GET_CODE (SET_SRC (elt
)) != REG
9220 || GET_MODE (SET_SRC (elt
)) != SImode
9221 || REGNO (SET_SRC (elt
)) != src_regno
+ i
9222 || GET_CODE (SET_DEST (elt
)) != MEM
9223 || GET_MODE (SET_DEST (elt
)) != SImode
)
9225 newaddr
= XEXP (SET_DEST (elt
), 0);
9226 if (legitimate_indirect_address_p (newaddr
, 0))
9231 else if (rs6000_legitimate_offset_address_p (SImode
, newaddr
, 0))
9233 addr_reg
= XEXP (newaddr
, 0);
9234 newoffset
= INTVAL (XEXP (newaddr
, 1));
9238 if (REGNO (addr_reg
) != base_regno
9239 || newoffset
!= offset
+ 4 * i
)
9246 /* A validation routine: say whether CODE, a condition code, and MODE
9247 match. The other alternatives either don't make sense or should
9248 never be generated. */
9251 validate_condition_mode (enum rtx_code code
, enum machine_mode mode
)
9253 if ((GET_RTX_CLASS (code
) != RTX_COMPARE
9254 && GET_RTX_CLASS (code
) != RTX_COMM_COMPARE
)
9255 || GET_MODE_CLASS (mode
) != MODE_CC
)
9258 /* These don't make sense. */
9259 if ((code
== GT
|| code
== LT
|| code
== GE
|| code
== LE
)
9260 && mode
== CCUNSmode
)
9263 if ((code
== GTU
|| code
== LTU
|| code
== GEU
|| code
== LEU
)
9264 && mode
!= CCUNSmode
)
9267 if (mode
!= CCFPmode
9268 && (code
== ORDERED
|| code
== UNORDERED
9269 || code
== UNEQ
|| code
== LTGT
9270 || code
== UNGT
|| code
== UNLT
9271 || code
== UNGE
|| code
== UNLE
))
9274 /* These should never be generated except for
9275 flag_finite_math_only. */
9276 if (mode
== CCFPmode
9277 && ! flag_finite_math_only
9278 && (code
== LE
|| code
== GE
9279 || code
== UNEQ
|| code
== LTGT
9280 || code
== UNGT
|| code
== UNLT
))
9283 /* These are invalid; the information is not there. */
9284 if (mode
== CCEQmode
9285 && code
!= EQ
&& code
!= NE
)
9289 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
9290 We only check the opcode against the mode of the CC value here. */
9293 branch_comparison_operator (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
9295 enum rtx_code code
= GET_CODE (op
);
9296 enum machine_mode cc_mode
;
9298 if (!COMPARISON_P (op
))
9301 cc_mode
= GET_MODE (XEXP (op
, 0));
9302 if (GET_MODE_CLASS (cc_mode
) != MODE_CC
)
9305 validate_condition_mode (code
, cc_mode
);
9310 /* Return 1 if OP is a comparison operation that is valid for a branch
9311 insn and which is true if the corresponding bit in the CC register
9315 branch_positive_comparison_operator (rtx op
, enum machine_mode mode
)
9319 if (! branch_comparison_operator (op
, mode
))
9322 code
= GET_CODE (op
);
9323 return (code
== EQ
|| code
== LT
|| code
== GT
9324 || code
== LTU
|| code
== GTU
9325 || code
== UNORDERED
);
9328 /* Return 1 if OP is a comparison operation that is valid for an scc
9329 insn: it must be a positive comparison. */
9332 scc_comparison_operator (rtx op
, enum machine_mode mode
)
9334 return branch_positive_comparison_operator (op
, mode
);
9338 trap_comparison_operator (rtx op
, enum machine_mode mode
)
9340 if (mode
!= VOIDmode
&& mode
!= GET_MODE (op
))
9342 return COMPARISON_P (op
);
9346 boolean_operator (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
9348 enum rtx_code code
= GET_CODE (op
);
9349 return (code
== AND
|| code
== IOR
|| code
== XOR
);
9353 boolean_or_operator (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
9355 enum rtx_code code
= GET_CODE (op
);
9356 return (code
== IOR
|| code
== XOR
);
9360 min_max_operator (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
9362 enum rtx_code code
= GET_CODE (op
);
9363 return (code
== SMIN
|| code
== SMAX
|| code
== UMIN
|| code
== UMAX
);
9366 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
9367 mask required to convert the result of a rotate insn into a shift
9368 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
9371 includes_lshift_p (rtx shiftop
, rtx andop
)
9373 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
9375 shift_mask
<<= INTVAL (shiftop
);
9377 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
9380 /* Similar, but for right shift. */
9383 includes_rshift_p (rtx shiftop
, rtx andop
)
9385 unsigned HOST_WIDE_INT shift_mask
= ~(unsigned HOST_WIDE_INT
) 0;
9387 shift_mask
>>= INTVAL (shiftop
);
9389 return (INTVAL (andop
) & 0xffffffff & ~shift_mask
) == 0;
9392 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
9393 to perform a left shift. It must have exactly SHIFTOP least
9394 significant 0's, then one or more 1's, then zero or more 0's. */
9397 includes_rldic_lshift_p (rtx shiftop
, rtx andop
)
9399 if (GET_CODE (andop
) == CONST_INT
)
9401 HOST_WIDE_INT c
, lsb
, shift_mask
;
9404 if (c
== 0 || c
== ~0)
9408 shift_mask
<<= INTVAL (shiftop
);
9410 /* Find the least significant one bit. */
9413 /* It must coincide with the LSB of the shift mask. */
9414 if (-lsb
!= shift_mask
)
9417 /* Invert to look for the next transition (if any). */
9420 /* Remove the low group of ones (originally low group of zeros). */
9423 /* Again find the lsb, and check we have all 1's above. */
9427 else if (GET_CODE (andop
) == CONST_DOUBLE
9428 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
9430 HOST_WIDE_INT low
, high
, lsb
;
9431 HOST_WIDE_INT shift_mask_low
, shift_mask_high
;
9433 low
= CONST_DOUBLE_LOW (andop
);
9434 if (HOST_BITS_PER_WIDE_INT
< 64)
9435 high
= CONST_DOUBLE_HIGH (andop
);
9437 if ((low
== 0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== 0))
9438 || (low
== ~0 && (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0)))
9441 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
9443 shift_mask_high
= ~0;
9444 if (INTVAL (shiftop
) > 32)
9445 shift_mask_high
<<= INTVAL (shiftop
) - 32;
9449 if (-lsb
!= shift_mask_high
|| INTVAL (shiftop
) < 32)
9456 return high
== -lsb
;
9459 shift_mask_low
= ~0;
9460 shift_mask_low
<<= INTVAL (shiftop
);
9464 if (-lsb
!= shift_mask_low
)
9467 if (HOST_BITS_PER_WIDE_INT
< 64)
9472 if (HOST_BITS_PER_WIDE_INT
< 64 && low
== 0)
9475 return high
== -lsb
;
9479 return low
== -lsb
&& (HOST_BITS_PER_WIDE_INT
>= 64 || high
== ~0);
9485 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
9486 to perform a left shift. It must have SHIFTOP or more least
9487 significant 0's, with the remainder of the word 1's. */
9490 includes_rldicr_lshift_p (rtx shiftop
, rtx andop
)
9492 if (GET_CODE (andop
) == CONST_INT
)
9494 HOST_WIDE_INT c
, lsb
, shift_mask
;
9497 shift_mask
<<= INTVAL (shiftop
);
9500 /* Find the least significant one bit. */
9503 /* It must be covered by the shift mask.
9504 This test also rejects c == 0. */
9505 if ((lsb
& shift_mask
) == 0)
9508 /* Check we have all 1's above the transition, and reject all 1's. */
9509 return c
== -lsb
&& lsb
!= 1;
9511 else if (GET_CODE (andop
) == CONST_DOUBLE
9512 && (GET_MODE (andop
) == VOIDmode
|| GET_MODE (andop
) == DImode
))
9514 HOST_WIDE_INT low
, lsb
, shift_mask_low
;
9516 low
= CONST_DOUBLE_LOW (andop
);
9518 if (HOST_BITS_PER_WIDE_INT
< 64)
9520 HOST_WIDE_INT high
, shift_mask_high
;
9522 high
= CONST_DOUBLE_HIGH (andop
);
9526 shift_mask_high
= ~0;
9527 if (INTVAL (shiftop
) > 32)
9528 shift_mask_high
<<= INTVAL (shiftop
) - 32;
9532 if ((lsb
& shift_mask_high
) == 0)
9535 return high
== -lsb
;
9541 shift_mask_low
= ~0;
9542 shift_mask_low
<<= INTVAL (shiftop
);
9546 if ((lsb
& shift_mask_low
) == 0)
9549 return low
== -lsb
&& lsb
!= 1;
9555 /* Return 1 if operands will generate a valid arguments to rlwimi
9556 instruction for insert with right shift in 64-bit mode. The mask may
9557 not start on the first bit or stop on the last bit because wrap-around
9558 effects of instruction do not correspond to semantics of RTL insn. */
9561 insvdi_rshift_rlwimi_p (rtx sizeop
, rtx startop
, rtx shiftop
)
9563 if (INTVAL (startop
) < 64
9564 && INTVAL (startop
) > 32
9565 && (INTVAL (sizeop
) + INTVAL (startop
) < 64)
9566 && (INTVAL (sizeop
) + INTVAL (startop
) > 33)
9567 && (INTVAL (sizeop
) + INTVAL (startop
) + INTVAL (shiftop
) < 96)
9568 && (INTVAL (sizeop
) + INTVAL (startop
) + INTVAL (shiftop
) >= 64)
9569 && (64 - (INTVAL (shiftop
) & 63)) >= INTVAL (sizeop
))
9575 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
9576 for lfq and stfq insns iff the registers are hard registers. */
9579 registers_ok_for_quad_peep (rtx reg1
, rtx reg2
)
9581 /* We might have been passed a SUBREG. */
9582 if (GET_CODE (reg1
) != REG
|| GET_CODE (reg2
) != REG
)
9585 /* We might have been passed non floating point registers. */
9586 if (!FP_REGNO_P (REGNO (reg1
))
9587 || !FP_REGNO_P (REGNO (reg2
)))
9590 return (REGNO (reg1
) == REGNO (reg2
) - 1);
9593 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
9594 addr1 and addr2 must be in consecutive memory locations
9595 (addr2 == addr1 + 8). */
9598 mems_ok_for_quad_peep (rtx mem1
, rtx mem2
)
9604 /* The mems cannot be volatile. */
9605 if (MEM_VOLATILE_P (mem1
) || MEM_VOLATILE_P (mem2
))
9608 addr1
= XEXP (mem1
, 0);
9609 addr2
= XEXP (mem2
, 0);
9611 /* Extract an offset (if used) from the first addr. */
9612 if (GET_CODE (addr1
) == PLUS
)
9614 /* If not a REG, return zero. */
9615 if (GET_CODE (XEXP (addr1
, 0)) != REG
)
9619 reg1
= REGNO (XEXP (addr1
, 0));
9620 /* The offset must be constant! */
9621 if (GET_CODE (XEXP (addr1
, 1)) != CONST_INT
)
9623 offset1
= INTVAL (XEXP (addr1
, 1));
9626 else if (GET_CODE (addr1
) != REG
)
9630 reg1
= REGNO (addr1
);
9631 /* This was a simple (mem (reg)) expression. Offset is 0. */
9635 /* Make sure the second address is a (mem (plus (reg) (const_int)))
9636 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
9637 register as addr1. */
9638 if (offset1
== -8 && GET_CODE (addr2
) == REG
&& reg1
== REGNO (addr2
))
9640 if (GET_CODE (addr2
) != PLUS
)
9643 if (GET_CODE (XEXP (addr2
, 0)) != REG
9644 || GET_CODE (XEXP (addr2
, 1)) != CONST_INT
)
9647 if (reg1
!= REGNO (XEXP (addr2
, 0)))
9650 /* The offset for the second addr must be 8 more than the first addr. */
9651 if (INTVAL (XEXP (addr2
, 1)) != offset1
+ 8)
9654 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
9659 /* Return the register class of a scratch register needed to copy IN into
9660 or out of a register in CLASS in MODE. If it can be done directly,
9661 NO_REGS is returned. */
9664 secondary_reload_class (enum reg_class
class,
9665 enum machine_mode mode ATTRIBUTE_UNUSED
,
9670 if (TARGET_ELF
|| (DEFAULT_ABI
== ABI_DARWIN
9672 && MACHOPIC_INDIRECT
9676 /* We cannot copy a symbolic operand directly into anything
9677 other than BASE_REGS for TARGET_ELF. So indicate that a
9678 register from BASE_REGS is needed as an intermediate
9681 On Darwin, pic addresses require a load from memory, which
9682 needs a base register. */
9683 if (class != BASE_REGS
9684 && (GET_CODE (in
) == SYMBOL_REF
9685 || GET_CODE (in
) == HIGH
9686 || GET_CODE (in
) == LABEL_REF
9687 || GET_CODE (in
) == CONST
))
9691 if (GET_CODE (in
) == REG
)
9694 if (regno
>= FIRST_PSEUDO_REGISTER
)
9696 regno
= true_regnum (in
);
9697 if (regno
>= FIRST_PSEUDO_REGISTER
)
9701 else if (GET_CODE (in
) == SUBREG
)
9703 regno
= true_regnum (in
);
9704 if (regno
>= FIRST_PSEUDO_REGISTER
)
9710 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
9712 if (class == GENERAL_REGS
|| class == BASE_REGS
9713 || (regno
>= 0 && INT_REGNO_P (regno
)))
9716 /* Constants, memory, and FP registers can go into FP registers. */
9717 if ((regno
== -1 || FP_REGNO_P (regno
))
9718 && (class == FLOAT_REGS
|| class == NON_SPECIAL_REGS
))
9721 /* Memory, and AltiVec registers can go into AltiVec registers. */
9722 if ((regno
== -1 || ALTIVEC_REGNO_P (regno
))
9723 && class == ALTIVEC_REGS
)
9726 /* We can copy among the CR registers. */
9727 if ((class == CR_REGS
|| class == CR0_REGS
)
9728 && regno
>= 0 && CR_REGNO_P (regno
))
9731 /* Otherwise, we need GENERAL_REGS. */
9732 return GENERAL_REGS
;
9735 /* Given a comparison operation, return the bit number in CCR to test. We
9736 know this is a valid comparison.
9738 SCC_P is 1 if this is for an scc. That means that %D will have been
9739 used instead of %C, so the bits will be in different places.
9741 Return -1 if OP isn't a valid comparison for some reason. */
9744 ccr_bit (rtx op
, int scc_p
)
9746 enum rtx_code code
= GET_CODE (op
);
9747 enum machine_mode cc_mode
;
9752 if (!COMPARISON_P (op
))
9757 if (GET_CODE (reg
) != REG
9758 || ! CR_REGNO_P (REGNO (reg
)))
9761 cc_mode
= GET_MODE (reg
);
9762 cc_regnum
= REGNO (reg
);
9763 base_bit
= 4 * (cc_regnum
- CR0_REGNO
);
9765 validate_condition_mode (code
, cc_mode
);
9767 /* When generating a sCOND operation, only positive conditions are
9769 if (scc_p
&& code
!= EQ
&& code
!= GT
&& code
!= LT
&& code
!= UNORDERED
9770 && code
!= GTU
&& code
!= LTU
)
9776 return scc_p
? base_bit
+ 3 : base_bit
+ 2;
9778 return base_bit
+ 2;
9779 case GT
: case GTU
: case UNLE
:
9780 return base_bit
+ 1;
9781 case LT
: case LTU
: case UNGE
:
9783 case ORDERED
: case UNORDERED
:
9784 return base_bit
+ 3;
9787 /* If scc, we will have done a cror to put the bit in the
9788 unordered position. So test that bit. For integer, this is ! LT
9789 unless this is an scc insn. */
9790 return scc_p
? base_bit
+ 3 : base_bit
;
9793 return scc_p
? base_bit
+ 3 : base_bit
+ 1;
9800 /* Return the GOT register. */
9803 rs6000_got_register (rtx value ATTRIBUTE_UNUSED
)
9805 /* The second flow pass currently (June 1999) can't update
9806 regs_ever_live without disturbing other parts of the compiler, so
9807 update it here to make the prolog/epilogue code happy. */
9808 if (no_new_pseudos
&& ! regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
])
9809 regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
] = 1;
9811 current_function_uses_pic_offset_table
= 1;
9813 return pic_offset_table_rtx
;
9816 /* Function to init struct machine_function.
9817 This will be called, via a pointer variable,
9818 from push_function_context. */
9820 static struct machine_function
*
9821 rs6000_init_machine_status (void)
9823 return ggc_alloc_cleared (sizeof (machine_function
));
9826 /* These macros test for integers and extract the low-order bits. */
9828 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
9829 && GET_MODE (X) == VOIDmode)
9831 #define INT_LOWPART(X) \
9832 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
9838 unsigned long val
= INT_LOWPART (op
);
9840 /* If the high bit is zero, the value is the first 1 bit we find
9842 if ((val
& 0x80000000) == 0)
9844 if ((val
& 0xffffffff) == 0)
9848 while (((val
<<= 1) & 0x80000000) == 0)
9853 /* If the high bit is set and the low bit is not, or the mask is all
9854 1's, the value is zero. */
9855 if ((val
& 1) == 0 || (val
& 0xffffffff) == 0xffffffff)
9858 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9861 while (((val
>>= 1) & 1) != 0)
9871 unsigned long val
= INT_LOWPART (op
);
9873 /* If the low bit is zero, the value is the first 1 bit we find from
9877 if ((val
& 0xffffffff) == 0)
9881 while (((val
>>= 1) & 1) == 0)
9887 /* If the low bit is set and the high bit is not, or the mask is all
9888 1's, the value is 31. */
9889 if ((val
& 0x80000000) == 0 || (val
& 0xffffffff) == 0xffffffff)
9892 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
9895 while (((val
<<= 1) & 0x80000000) != 0)
9901 /* Locate some local-dynamic symbol still in use by this function
9902 so that we can print its name in some tls_ld pattern. */
9905 rs6000_get_some_local_dynamic_name (void)
9909 if (cfun
->machine
->some_ld_name
)
9910 return cfun
->machine
->some_ld_name
;
9912 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
9914 && for_each_rtx (&PATTERN (insn
),
9915 rs6000_get_some_local_dynamic_name_1
, 0))
9916 return cfun
->machine
->some_ld_name
;
9921 /* Helper function for rs6000_get_some_local_dynamic_name. */
9924 rs6000_get_some_local_dynamic_name_1 (rtx
*px
, void *data ATTRIBUTE_UNUSED
)
9928 if (GET_CODE (x
) == SYMBOL_REF
)
9930 const char *str
= XSTR (x
, 0);
9931 if (SYMBOL_REF_TLS_MODEL (x
) == TLS_MODEL_LOCAL_DYNAMIC
)
9933 cfun
->machine
->some_ld_name
= str
;
9941 /* Write out a function code label. */
9944 rs6000_output_function_entry (FILE *file
, const char *fname
)
9946 if (fname
[0] != '.')
9948 switch (DEFAULT_ABI
)
9957 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "L.");
9966 RS6000_OUTPUT_BASENAME (file
, fname
);
9968 assemble_name (file
, fname
);
9971 /* Print an operand. Recognize special options, documented below. */
9974 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
9975 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
9977 #define SMALL_DATA_RELOC "sda21"
9978 #define SMALL_DATA_REG 0
9982 print_operand (FILE *file
, rtx x
, int code
)
9986 unsigned HOST_WIDE_INT uval
;
9991 /* Write out an instruction after the call which may be replaced
9992 with glue code by the loader. This depends on the AIX version. */
9993 asm_fprintf (file
, RS6000_CALL_GLUE
);
9996 /* %a is output_address. */
9999 /* If X is a constant integer whose low-order 5 bits are zero,
10000 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
10001 in the AIX assembler where "sri" with a zero shift count
10002 writes a trash instruction. */
10003 if (GET_CODE (x
) == CONST_INT
&& (INTVAL (x
) & 31) == 0)
10010 /* If constant, low-order 16 bits of constant, unsigned.
10011 Otherwise, write normally. */
10013 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 0xffff);
10015 print_operand (file
, x
, 0);
10019 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
10020 for 64-bit mask direction. */
10021 putc (((INT_LOWPART(x
) & 1) == 0 ? 'r' : 'l'), file
);
10024 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
10028 /* X is a CR register. Print the number of the GT bit of the CR. */
10029 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
10030 output_operand_lossage ("invalid %%E value");
10032 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
) + 1);
10036 /* Like 'J' but get to the EQ bit. */
10037 if (GET_CODE (x
) != REG
)
10040 /* Bit 1 is EQ bit. */
10041 i
= 4 * (REGNO (x
) - CR0_REGNO
) + 2;
10043 /* If we want bit 31, write a shift count of zero, not 32. */
10044 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
10048 /* X is a CR register. Print the number of the EQ bit of the CR */
10049 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
10050 output_operand_lossage ("invalid %%E value");
10052 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
) + 2);
10056 /* X is a CR register. Print the shift count needed to move it
10057 to the high-order four bits. */
10058 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
10059 output_operand_lossage ("invalid %%f value");
10061 fprintf (file
, "%d", 4 * (REGNO (x
) - CR0_REGNO
));
10065 /* Similar, but print the count for the rotate in the opposite
10067 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
10068 output_operand_lossage ("invalid %%F value");
10070 fprintf (file
, "%d", 32 - 4 * (REGNO (x
) - CR0_REGNO
));
10074 /* X is a constant integer. If it is negative, print "m",
10075 otherwise print "z". This is to make an aze or ame insn. */
10076 if (GET_CODE (x
) != CONST_INT
)
10077 output_operand_lossage ("invalid %%G value");
10078 else if (INTVAL (x
) >= 0)
10085 /* If constant, output low-order five bits. Otherwise, write
10088 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 31);
10090 print_operand (file
, x
, 0);
10094 /* If constant, output low-order six bits. Otherwise, write
10097 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INT_LOWPART (x
) & 63);
10099 print_operand (file
, x
, 0);
10103 /* Print `i' if this is a constant, else nothing. */
10109 /* Write the bit number in CCR for jump. */
10110 i
= ccr_bit (x
, 0);
10112 output_operand_lossage ("invalid %%j code");
10114 fprintf (file
, "%d", i
);
10118 /* Similar, but add one for shift count in rlinm for scc and pass
10119 scc flag to `ccr_bit'. */
10120 i
= ccr_bit (x
, 1);
10122 output_operand_lossage ("invalid %%J code");
10124 /* If we want bit 31, write a shift count of zero, not 32. */
10125 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
10129 /* X must be a constant. Write the 1's complement of the
10132 output_operand_lossage ("invalid %%k value");
10134 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, ~ INT_LOWPART (x
));
10138 /* X must be a symbolic constant on ELF. Write an
10139 expression suitable for an 'addi' that adds in the low 16
10140 bits of the MEM. */
10141 if (GET_CODE (x
) != CONST
)
10143 print_operand_address (file
, x
);
10144 fputs ("@l", file
);
10148 if (GET_CODE (XEXP (x
, 0)) != PLUS
10149 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) != SYMBOL_REF
10150 && GET_CODE (XEXP (XEXP (x
, 0), 0)) != LABEL_REF
)
10151 || GET_CODE (XEXP (XEXP (x
, 0), 1)) != CONST_INT
)
10152 output_operand_lossage ("invalid %%K value");
10153 print_operand_address (file
, XEXP (XEXP (x
, 0), 0));
10154 fputs ("@l", file
);
10155 /* For GNU as, there must be a non-alphanumeric character
10156 between 'l' and the number. The '-' is added by
10157 print_operand() already. */
10158 if (INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0)
10160 print_operand (file
, XEXP (XEXP (x
, 0), 1), 0);
10164 /* %l is output_asm_label. */
10167 /* Write second word of DImode or DFmode reference. Works on register
10168 or non-indexed memory only. */
10169 if (GET_CODE (x
) == REG
)
10170 fputs (reg_names
[REGNO (x
) + 1], file
);
10171 else if (GET_CODE (x
) == MEM
)
10173 /* Handle possible auto-increment. Since it is pre-increment and
10174 we have already done it, we can just use an offset of word. */
10175 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
10176 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
10177 output_address (plus_constant (XEXP (XEXP (x
, 0), 0),
10180 output_address (XEXP (adjust_address_nv (x
, SImode
,
10184 if (small_data_operand (x
, GET_MODE (x
)))
10185 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
10186 reg_names
[SMALL_DATA_REG
]);
10191 /* MB value for a mask operand. */
10192 if (! mask_operand (x
, SImode
))
10193 output_operand_lossage ("invalid %%m value");
10195 fprintf (file
, "%d", extract_MB (x
));
10199 /* ME value for a mask operand. */
10200 if (! mask_operand (x
, SImode
))
10201 output_operand_lossage ("invalid %%M value");
10203 fprintf (file
, "%d", extract_ME (x
));
10206 /* %n outputs the negative of its operand. */
10209 /* Write the number of elements in the vector times 4. */
10210 if (GET_CODE (x
) != PARALLEL
)
10211 output_operand_lossage ("invalid %%N value");
10213 fprintf (file
, "%d", XVECLEN (x
, 0) * 4);
10217 /* Similar, but subtract 1 first. */
10218 if (GET_CODE (x
) != PARALLEL
)
10219 output_operand_lossage ("invalid %%O value");
10221 fprintf (file
, "%d", (XVECLEN (x
, 0) - 1) * 4);
10225 /* X is a CONST_INT that is a power of two. Output the logarithm. */
10227 || INT_LOWPART (x
) < 0
10228 || (i
= exact_log2 (INT_LOWPART (x
))) < 0)
10229 output_operand_lossage ("invalid %%p value");
10231 fprintf (file
, "%d", i
);
10235 /* The operand must be an indirect memory reference. The result
10236 is the register name. */
10237 if (GET_CODE (x
) != MEM
|| GET_CODE (XEXP (x
, 0)) != REG
10238 || REGNO (XEXP (x
, 0)) >= 32)
10239 output_operand_lossage ("invalid %%P value");
10241 fputs (reg_names
[REGNO (XEXP (x
, 0))], file
);
10245 /* This outputs the logical code corresponding to a boolean
10246 expression. The expression may have one or both operands
10247 negated (if one, only the first one). For condition register
10248 logical operations, it will also treat the negated
10249 CR codes as NOTs, but not handle NOTs of them. */
10251 const char *const *t
= 0;
10253 enum rtx_code code
= GET_CODE (x
);
10254 static const char * const tbl
[3][3] = {
10255 { "and", "andc", "nor" },
10256 { "or", "orc", "nand" },
10257 { "xor", "eqv", "xor" } };
10261 else if (code
== IOR
)
10263 else if (code
== XOR
)
10266 output_operand_lossage ("invalid %%q value");
10268 if (GET_CODE (XEXP (x
, 0)) != NOT
)
10272 if (GET_CODE (XEXP (x
, 1)) == NOT
)
10290 /* X is a CR register. Print the mask for `mtcrf'. */
10291 if (GET_CODE (x
) != REG
|| ! CR_REGNO_P (REGNO (x
)))
10292 output_operand_lossage ("invalid %%R value");
10294 fprintf (file
, "%d", 128 >> (REGNO (x
) - CR0_REGNO
));
10298 /* Low 5 bits of 32 - value */
10300 output_operand_lossage ("invalid %%s value");
10302 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, (32 - INT_LOWPART (x
)) & 31);
10306 /* PowerPC64 mask position. All 0's is excluded.
10307 CONST_INT 32-bit mask is considered sign-extended so any
10308 transition must occur within the CONST_INT, not on the boundary. */
10309 if (! mask64_operand (x
, DImode
))
10310 output_operand_lossage ("invalid %%S value");
10312 uval
= INT_LOWPART (x
);
10314 if (uval
& 1) /* Clear Left */
10316 #if HOST_BITS_PER_WIDE_INT > 64
10317 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
10321 else /* Clear Right */
10324 #if HOST_BITS_PER_WIDE_INT > 64
10325 uval
&= ((unsigned HOST_WIDE_INT
) 1 << 64) - 1;
10333 fprintf (file
, "%d", i
);
10337 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
10338 if (GET_CODE (x
) != REG
|| GET_MODE (x
) != CCmode
)
10341 /* Bit 3 is OV bit. */
10342 i
= 4 * (REGNO (x
) - CR0_REGNO
) + 3;
10344 /* If we want bit 31, write a shift count of zero, not 32. */
10345 fprintf (file
, "%d", i
== 31 ? 0 : i
+ 1);
10349 /* Print the symbolic name of a branch target register. */
10350 if (GET_CODE (x
) != REG
|| (REGNO (x
) != LINK_REGISTER_REGNUM
10351 && REGNO (x
) != COUNT_REGISTER_REGNUM
))
10352 output_operand_lossage ("invalid %%T value");
10353 else if (REGNO (x
) == LINK_REGISTER_REGNUM
)
10354 fputs (TARGET_NEW_MNEMONICS
? "lr" : "r", file
);
10356 fputs ("ctr", file
);
10360 /* High-order 16 bits of constant for use in unsigned operand. */
10362 output_operand_lossage ("invalid %%u value");
10364 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
10365 (INT_LOWPART (x
) >> 16) & 0xffff);
10369 /* High-order 16 bits of constant for use in signed operand. */
10371 output_operand_lossage ("invalid %%v value");
10373 fprintf (file
, HOST_WIDE_INT_PRINT_HEX
,
10374 (INT_LOWPART (x
) >> 16) & 0xffff);
10378 /* Print `u' if this has an auto-increment or auto-decrement. */
10379 if (GET_CODE (x
) == MEM
10380 && (GET_CODE (XEXP (x
, 0)) == PRE_INC
10381 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
))
10386 /* Print the trap code for this operand. */
10387 switch (GET_CODE (x
))
10390 fputs ("eq", file
); /* 4 */
10393 fputs ("ne", file
); /* 24 */
10396 fputs ("lt", file
); /* 16 */
10399 fputs ("le", file
); /* 20 */
10402 fputs ("gt", file
); /* 8 */
10405 fputs ("ge", file
); /* 12 */
10408 fputs ("llt", file
); /* 2 */
10411 fputs ("lle", file
); /* 6 */
10414 fputs ("lgt", file
); /* 1 */
10417 fputs ("lge", file
); /* 5 */
10425 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
10428 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
,
10429 ((INT_LOWPART (x
) & 0xffff) ^ 0x8000) - 0x8000);
10431 print_operand (file
, x
, 0);
10435 /* MB value for a PowerPC64 rldic operand. */
10436 val
= (GET_CODE (x
) == CONST_INT
10437 ? INTVAL (x
) : CONST_DOUBLE_HIGH (x
));
10442 for (i
= 0; i
< HOST_BITS_PER_WIDE_INT
; i
++)
10443 if ((val
<<= 1) < 0)
10446 #if HOST_BITS_PER_WIDE_INT == 32
10447 if (GET_CODE (x
) == CONST_INT
&& i
>= 0)
10448 i
+= 32; /* zero-extend high-part was all 0's */
10449 else if (GET_CODE (x
) == CONST_DOUBLE
&& i
== 32)
10451 val
= CONST_DOUBLE_LOW (x
);
10458 for ( ; i
< 64; i
++)
10459 if ((val
<<= 1) < 0)
10464 fprintf (file
, "%d", i
+ 1);
10468 if (GET_CODE (x
) == MEM
10469 && legitimate_indexed_address_p (XEXP (x
, 0), 0))
10474 /* Like 'L', for third word of TImode */
10475 if (GET_CODE (x
) == REG
)
10476 fputs (reg_names
[REGNO (x
) + 2], file
);
10477 else if (GET_CODE (x
) == MEM
)
10479 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
10480 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
10481 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 8));
10483 output_address (XEXP (adjust_address_nv (x
, SImode
, 8), 0));
10484 if (small_data_operand (x
, GET_MODE (x
)))
10485 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
10486 reg_names
[SMALL_DATA_REG
]);
10491 /* X is a SYMBOL_REF. Write out the name preceded by a
10492 period and without any trailing data in brackets. Used for function
10493 names. If we are configured for System V (or the embedded ABI) on
10494 the PowerPC, do not emit the period, since those systems do not use
10495 TOCs and the like. */
10496 if (GET_CODE (x
) != SYMBOL_REF
)
10499 /* Mark the decl as referenced so that cgraph will output the function. */
10500 if (SYMBOL_REF_DECL (x
))
10501 mark_decl_referenced (SYMBOL_REF_DECL (x
));
10503 /* For macho, check to see if we need a stub. */
10506 const char *name
= XSTR (x
, 0);
10508 if (MACHOPIC_INDIRECT
10509 && machopic_classify_symbol (x
) == MACHOPIC_UNDEFINED_FUNCTION
)
10510 name
= machopic_indirection_name (x
, /*stub_p=*/true);
10512 assemble_name (file
, name
);
10514 else if (!DOT_SYMBOLS
)
10515 assemble_name (file
, XSTR (x
, 0));
10517 rs6000_output_function_entry (file
, XSTR (x
, 0));
10521 /* Like 'L', for last word of TImode. */
10522 if (GET_CODE (x
) == REG
)
10523 fputs (reg_names
[REGNO (x
) + 3], file
);
10524 else if (GET_CODE (x
) == MEM
)
10526 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
10527 || GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
10528 output_address (plus_constant (XEXP (XEXP (x
, 0), 0), 12));
10530 output_address (XEXP (adjust_address_nv (x
, SImode
, 12), 0));
10531 if (small_data_operand (x
, GET_MODE (x
)))
10532 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
10533 reg_names
[SMALL_DATA_REG
]);
10537 /* Print AltiVec or SPE memory operand. */
10542 if (GET_CODE (x
) != MEM
)
10549 /* Handle [reg]. */
10550 if (GET_CODE (tmp
) == REG
)
10552 fprintf (file
, "0(%s)", reg_names
[REGNO (tmp
)]);
10555 /* Handle [reg+UIMM]. */
10556 else if (GET_CODE (tmp
) == PLUS
&&
10557 GET_CODE (XEXP (tmp
, 1)) == CONST_INT
)
10561 if (GET_CODE (XEXP (tmp
, 0)) != REG
)
10564 x
= INTVAL (XEXP (tmp
, 1));
10565 fprintf (file
, "%d(%s)", x
, reg_names
[REGNO (XEXP (tmp
, 0))]);
10569 /* Fall through. Must be [reg+reg]. */
10572 && GET_CODE (tmp
) == AND
10573 && GET_CODE (XEXP (tmp
, 1)) == CONST_INT
10574 && INTVAL (XEXP (tmp
, 1)) == -16)
10575 tmp
= XEXP (tmp
, 0);
10576 if (GET_CODE (tmp
) == REG
)
10577 fprintf (file
, "0,%s", reg_names
[REGNO (tmp
)]);
10578 else if (GET_CODE (tmp
) == PLUS
&& GET_CODE (XEXP (tmp
, 1)) == REG
)
10580 if (REGNO (XEXP (tmp
, 0)) == 0)
10581 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 1)) ],
10582 reg_names
[ REGNO (XEXP (tmp
, 0)) ]);
10584 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (tmp
, 0)) ],
10585 reg_names
[ REGNO (XEXP (tmp
, 1)) ]);
10593 if (GET_CODE (x
) == REG
)
10594 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
10595 else if (GET_CODE (x
) == MEM
)
10597 /* We need to handle PRE_INC and PRE_DEC here, since we need to
10598 know the width from the mode. */
10599 if (GET_CODE (XEXP (x
, 0)) == PRE_INC
)
10600 fprintf (file
, "%d(%s)", GET_MODE_SIZE (GET_MODE (x
)),
10601 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
10602 else if (GET_CODE (XEXP (x
, 0)) == PRE_DEC
)
10603 fprintf (file
, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x
)),
10604 reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))]);
10606 output_address (XEXP (x
, 0));
10609 output_addr_const (file
, x
);
10613 assemble_name (file
, rs6000_get_some_local_dynamic_name ());
10617 output_operand_lossage ("invalid %%xn code");
10621 /* Print the address of an operand. */
10624 print_operand_address (FILE *file
, rtx x
)
10626 if (GET_CODE (x
) == REG
)
10627 fprintf (file
, "0(%s)", reg_names
[ REGNO (x
) ]);
10628 else if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == CONST
10629 || GET_CODE (x
) == LABEL_REF
)
10631 output_addr_const (file
, x
);
10632 if (small_data_operand (x
, GET_MODE (x
)))
10633 fprintf (file
, "@%s(%s)", SMALL_DATA_RELOC
,
10634 reg_names
[SMALL_DATA_REG
]);
10635 else if (TARGET_TOC
)
10638 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == REG
)
10640 if (REGNO (XEXP (x
, 0)) == 0)
10641 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 1)) ],
10642 reg_names
[ REGNO (XEXP (x
, 0)) ]);
10644 fprintf (file
, "%s,%s", reg_names
[ REGNO (XEXP (x
, 0)) ],
10645 reg_names
[ REGNO (XEXP (x
, 1)) ]);
10647 else if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
)
10648 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
"(%s)",
10649 INTVAL (XEXP (x
, 1)), reg_names
[ REGNO (XEXP (x
, 0)) ]);
10651 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
10652 && CONSTANT_P (XEXP (x
, 1)))
10654 output_addr_const (file
, XEXP (x
, 1));
10655 fprintf (file
, "@l(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
10659 else if (GET_CODE (x
) == LO_SUM
&& GET_CODE (XEXP (x
, 0)) == REG
10660 && CONSTANT_P (XEXP (x
, 1)))
10662 fprintf (file
, "lo16(");
10663 output_addr_const (file
, XEXP (x
, 1));
10664 fprintf (file
, ")(%s)", reg_names
[ REGNO (XEXP (x
, 0)) ]);
10667 else if (legitimate_constant_pool_address_p (x
))
10669 if (TARGET_AIX
&& (!TARGET_ELF
|| !TARGET_MINIMAL_TOC
))
10671 rtx contains_minus
= XEXP (x
, 1);
10675 /* Find the (minus (sym) (toc)) buried in X, and temporarily
10676 turn it into (sym) for output_addr_const. */
10677 while (GET_CODE (XEXP (contains_minus
, 0)) != MINUS
)
10678 contains_minus
= XEXP (contains_minus
, 0);
10680 minus
= XEXP (contains_minus
, 0);
10681 symref
= XEXP (minus
, 0);
10682 XEXP (contains_minus
, 0) = symref
;
10687 name
= XSTR (symref
, 0);
10688 newname
= alloca (strlen (name
) + sizeof ("@toc"));
10689 strcpy (newname
, name
);
10690 strcat (newname
, "@toc");
10691 XSTR (symref
, 0) = newname
;
10693 output_addr_const (file
, XEXP (x
, 1));
10695 XSTR (symref
, 0) = name
;
10696 XEXP (contains_minus
, 0) = minus
;
10699 output_addr_const (file
, XEXP (x
, 1));
10701 fprintf (file
, "(%s)", reg_names
[REGNO (XEXP (x
, 0))]);
10707 /* Target hook for assembling integer objects. The PowerPC version has
10708 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
10709 is defined. It also needs to handle DI-mode objects on 64-bit
10713 rs6000_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
10715 #ifdef RELOCATABLE_NEEDS_FIXUP
10716 /* Special handling for SI values. */
10717 if (size
== 4 && aligned_p
)
10719 extern int in_toc_section (void);
10720 static int recurse
= 0;
10722 /* For -mrelocatable, we mark all addresses that need to be fixed up
10723 in the .fixup section. */
10724 if (TARGET_RELOCATABLE
10725 && !in_toc_section ()
10726 && !in_text_section ()
10727 && !in_unlikely_text_section ()
10729 && GET_CODE (x
) != CONST_INT
10730 && GET_CODE (x
) != CONST_DOUBLE
10736 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCP", fixuplabelno
);
10738 ASM_OUTPUT_LABEL (asm_out_file
, buf
);
10739 fprintf (asm_out_file
, "\t.long\t(");
10740 output_addr_const (asm_out_file
, x
);
10741 fprintf (asm_out_file
, ")@fixup\n");
10742 fprintf (asm_out_file
, "\t.section\t\".fixup\",\"aw\"\n");
10743 ASM_OUTPUT_ALIGN (asm_out_file
, 2);
10744 fprintf (asm_out_file
, "\t.long\t");
10745 assemble_name (asm_out_file
, buf
);
10746 fprintf (asm_out_file
, "\n\t.previous\n");
10750 /* Remove initial .'s to turn a -mcall-aixdesc function
10751 address into the address of the descriptor, not the function
10753 else if (GET_CODE (x
) == SYMBOL_REF
10754 && XSTR (x
, 0)[0] == '.'
10755 && DEFAULT_ABI
== ABI_AIX
)
10757 const char *name
= XSTR (x
, 0);
10758 while (*name
== '.')
10761 fprintf (asm_out_file
, "\t.long\t%s\n", name
);
10765 #endif /* RELOCATABLE_NEEDS_FIXUP */
10766 return default_assemble_integer (x
, size
, aligned_p
);
10769 #ifdef HAVE_GAS_HIDDEN
10770 /* Emit an assembler directive to set symbol visibility for DECL to
10771 VISIBILITY_TYPE. */
10774 rs6000_assemble_visibility (tree decl
, int vis
)
10776 /* Functions need to have their entry point symbol visibility set as
10777 well as their descriptor symbol visibility. */
10778 if (DEFAULT_ABI
== ABI_AIX
10780 && TREE_CODE (decl
) == FUNCTION_DECL
)
10782 static const char * const visibility_types
[] = {
10783 NULL
, "internal", "hidden", "protected"
10786 const char *name
, *type
;
10788 name
= ((* targetm
.strip_name_encoding
)
10789 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
))));
10790 type
= visibility_types
[vis
];
10792 fprintf (asm_out_file
, "\t.%s\t%s\n", type
, name
);
10793 fprintf (asm_out_file
, "\t.%s\t.%s\n", type
, name
);
10796 default_assemble_visibility (decl
, vis
);
10801 rs6000_reverse_condition (enum machine_mode mode
, enum rtx_code code
)
10803 /* Reversal of FP compares takes care -- an ordered compare
10804 becomes an unordered compare and vice versa. */
10805 if (mode
== CCFPmode
10806 && (!flag_finite_math_only
10807 || code
== UNLT
|| code
== UNLE
|| code
== UNGT
|| code
== UNGE
10808 || code
== UNEQ
|| code
== LTGT
))
10809 return reverse_condition_maybe_unordered (code
);
10811 return reverse_condition (code
);
10814 /* Generate a compare for CODE. Return a brand-new rtx that
10815 represents the result of the compare. */
10818 rs6000_generate_compare (enum rtx_code code
)
10820 enum machine_mode comp_mode
;
10821 rtx compare_result
;
10823 if (rs6000_compare_fp_p
)
10824 comp_mode
= CCFPmode
;
10825 else if (code
== GTU
|| code
== LTU
10826 || code
== GEU
|| code
== LEU
)
10827 comp_mode
= CCUNSmode
;
10829 comp_mode
= CCmode
;
10831 /* First, the compare. */
10832 compare_result
= gen_reg_rtx (comp_mode
);
10834 /* SPE FP compare instructions on the GPRs. Yuck! */
10835 if ((TARGET_E500
&& !TARGET_FPRS
&& TARGET_HARD_FLOAT
)
10836 && rs6000_compare_fp_p
)
10838 rtx cmp
, or1
, or2
, or_result
, compare_result2
;
10840 /* Note: The E500 comparison instructions set the GT bit (x +
10841 1), on success. This explains the mess. */
10845 case EQ
: case UNEQ
: case NE
: case LTGT
:
10846 cmp
= flag_finite_math_only
10847 ? gen_tstsfeq_gpr (compare_result
, rs6000_compare_op0
,
10848 rs6000_compare_op1
)
10849 : gen_cmpsfeq_gpr (compare_result
, rs6000_compare_op0
,
10850 rs6000_compare_op1
);
10852 case GT
: case GTU
: case UNGT
: case UNGE
: case GE
: case GEU
:
10853 cmp
= flag_finite_math_only
10854 ? gen_tstsfgt_gpr (compare_result
, rs6000_compare_op0
,
10855 rs6000_compare_op1
)
10856 : gen_cmpsfgt_gpr (compare_result
, rs6000_compare_op0
,
10857 rs6000_compare_op1
);
10859 case LT
: case LTU
: case UNLT
: case UNLE
: case LE
: case LEU
:
10860 cmp
= flag_finite_math_only
10861 ? gen_tstsflt_gpr (compare_result
, rs6000_compare_op0
,
10862 rs6000_compare_op1
)
10863 : gen_cmpsflt_gpr (compare_result
, rs6000_compare_op0
,
10864 rs6000_compare_op1
);
10870 /* Synthesize LE and GE from LT/GT || EQ. */
10871 if (code
== LE
|| code
== GE
|| code
== LEU
|| code
== GEU
)
10877 case LE
: code
= LT
; break;
10878 case GE
: code
= GT
; break;
10879 case LEU
: code
= LT
; break;
10880 case GEU
: code
= GT
; break;
10884 or1
= gen_reg_rtx (SImode
);
10885 or2
= gen_reg_rtx (SImode
);
10886 or_result
= gen_reg_rtx (CCEQmode
);
10887 compare_result2
= gen_reg_rtx (CCFPmode
);
10890 cmp
= flag_finite_math_only
10891 ? gen_tstsfeq_gpr (compare_result2
, rs6000_compare_op0
,
10892 rs6000_compare_op1
)
10893 : gen_cmpsfeq_gpr (compare_result2
, rs6000_compare_op0
,
10894 rs6000_compare_op1
);
10897 or1
= gen_rtx_GT (SImode
, compare_result
, const0_rtx
);
10898 or2
= gen_rtx_GT (SImode
, compare_result2
, const0_rtx
);
10900 /* OR them together. */
10901 cmp
= gen_rtx_SET (VOIDmode
, or_result
,
10902 gen_rtx_COMPARE (CCEQmode
,
10903 gen_rtx_IOR (SImode
, or1
, or2
),
10905 compare_result
= or_result
;
10910 if (code
== NE
|| code
== LTGT
)
10919 emit_insn (gen_rtx_SET (VOIDmode
, compare_result
,
10920 gen_rtx_COMPARE (comp_mode
,
10921 rs6000_compare_op0
,
10922 rs6000_compare_op1
)));
10924 /* Some kinds of FP comparisons need an OR operation;
10925 under flag_finite_math_only we don't bother. */
10926 if (rs6000_compare_fp_p
10927 && ! flag_finite_math_only
10928 && ! (TARGET_HARD_FLOAT
&& TARGET_E500
&& !TARGET_FPRS
)
10929 && (code
== LE
|| code
== GE
10930 || code
== UNEQ
|| code
== LTGT
10931 || code
== UNGT
|| code
== UNLT
))
10933 enum rtx_code or1
, or2
;
10934 rtx or1_rtx
, or2_rtx
, compare2_rtx
;
10935 rtx or_result
= gen_reg_rtx (CCEQmode
);
10939 case LE
: or1
= LT
; or2
= EQ
; break;
10940 case GE
: or1
= GT
; or2
= EQ
; break;
10941 case UNEQ
: or1
= UNORDERED
; or2
= EQ
; break;
10942 case LTGT
: or1
= LT
; or2
= GT
; break;
10943 case UNGT
: or1
= UNORDERED
; or2
= GT
; break;
10944 case UNLT
: or1
= UNORDERED
; or2
= LT
; break;
10947 validate_condition_mode (or1
, comp_mode
);
10948 validate_condition_mode (or2
, comp_mode
);
10949 or1_rtx
= gen_rtx_fmt_ee (or1
, SImode
, compare_result
, const0_rtx
);
10950 or2_rtx
= gen_rtx_fmt_ee (or2
, SImode
, compare_result
, const0_rtx
);
10951 compare2_rtx
= gen_rtx_COMPARE (CCEQmode
,
10952 gen_rtx_IOR (SImode
, or1_rtx
, or2_rtx
),
10954 emit_insn (gen_rtx_SET (VOIDmode
, or_result
, compare2_rtx
));
10956 compare_result
= or_result
;
10960 validate_condition_mode (code
, GET_MODE (compare_result
));
10962 return gen_rtx_fmt_ee (code
, VOIDmode
, compare_result
, const0_rtx
);
10966 /* Emit the RTL for an sCOND pattern. */
10969 rs6000_emit_sCOND (enum rtx_code code
, rtx result
)
10972 enum machine_mode op_mode
;
10973 enum rtx_code cond_code
;
10975 condition_rtx
= rs6000_generate_compare (code
);
10976 cond_code
= GET_CODE (condition_rtx
);
10978 if (TARGET_E500
&& rs6000_compare_fp_p
10979 && !TARGET_FPRS
&& TARGET_HARD_FLOAT
)
10983 PUT_MODE (condition_rtx
, SImode
);
10984 t
= XEXP (condition_rtx
, 0);
10986 if (cond_code
!= NE
&& cond_code
!= EQ
)
10989 if (cond_code
== NE
)
10990 emit_insn (gen_e500_flip_eq_bit (t
, t
));
10992 emit_insn (gen_move_from_CR_eq_bit (result
, t
));
10996 if (cond_code
== NE
10997 || cond_code
== GE
|| cond_code
== LE
10998 || cond_code
== GEU
|| cond_code
== LEU
10999 || cond_code
== ORDERED
|| cond_code
== UNGE
|| cond_code
== UNLE
)
11001 rtx not_result
= gen_reg_rtx (CCEQmode
);
11002 rtx not_op
, rev_cond_rtx
;
11003 enum machine_mode cc_mode
;
11005 cc_mode
= GET_MODE (XEXP (condition_rtx
, 0));
11007 rev_cond_rtx
= gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode
, cond_code
),
11008 SImode
, XEXP (condition_rtx
, 0), const0_rtx
);
11009 not_op
= gen_rtx_COMPARE (CCEQmode
, rev_cond_rtx
, const0_rtx
);
11010 emit_insn (gen_rtx_SET (VOIDmode
, not_result
, not_op
));
11011 condition_rtx
= gen_rtx_EQ (VOIDmode
, not_result
, const0_rtx
);
11014 op_mode
= GET_MODE (rs6000_compare_op0
);
11015 if (op_mode
== VOIDmode
)
11016 op_mode
= GET_MODE (rs6000_compare_op1
);
11018 if (TARGET_POWERPC64
&& (op_mode
== DImode
|| rs6000_compare_fp_p
))
11020 PUT_MODE (condition_rtx
, DImode
);
11021 convert_move (result
, condition_rtx
, 0);
11025 PUT_MODE (condition_rtx
, SImode
);
11026 emit_insn (gen_rtx_SET (VOIDmode
, result
, condition_rtx
));
11030 /* Emit a branch of kind CODE to location LOC. */
11033 rs6000_emit_cbranch (enum rtx_code code
, rtx loc
)
11035 rtx condition_rtx
, loc_ref
;
11037 condition_rtx
= rs6000_generate_compare (code
);
11038 loc_ref
= gen_rtx_LABEL_REF (VOIDmode
, loc
);
11039 emit_jump_insn (gen_rtx_SET (VOIDmode
, pc_rtx
,
11040 gen_rtx_IF_THEN_ELSE (VOIDmode
, condition_rtx
,
11041 loc_ref
, pc_rtx
)));
11044 /* Return the string to output a conditional branch to LABEL, which is
11045 the operand number of the label, or -1 if the branch is really a
11046 conditional return.
11048 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
11049 condition code register and its mode specifies what kind of
11050 comparison we made.
11052 REVERSED is nonzero if we should reverse the sense of the comparison.
11054 INSN is the insn. */
11057 output_cbranch (rtx op
, const char *label
, int reversed
, rtx insn
)
11059 static char string
[64];
11060 enum rtx_code code
= GET_CODE (op
);
11061 rtx cc_reg
= XEXP (op
, 0);
11062 enum machine_mode mode
= GET_MODE (cc_reg
);
11063 int cc_regno
= REGNO (cc_reg
) - CR0_REGNO
;
11064 int need_longbranch
= label
!= NULL
&& get_attr_length (insn
) == 8;
11065 int really_reversed
= reversed
^ need_longbranch
;
11071 validate_condition_mode (code
, mode
);
11073 /* Work out which way this really branches. We could use
11074 reverse_condition_maybe_unordered here always but this
11075 makes the resulting assembler clearer. */
11076 if (really_reversed
)
11078 /* Reversal of FP compares takes care -- an ordered compare
11079 becomes an unordered compare and vice versa. */
11080 if (mode
== CCFPmode
)
11081 code
= reverse_condition_maybe_unordered (code
);
11083 code
= reverse_condition (code
);
11086 if ((TARGET_E500
&& !TARGET_FPRS
&& TARGET_HARD_FLOAT
) && mode
== CCFPmode
)
11088 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
11091 /* Opposite of GT. */
11093 else if (code
== NE
)
11101 /* Not all of these are actually distinct opcodes, but
11102 we distinguish them for clarity of the resulting assembler. */
11103 case NE
: case LTGT
:
11104 ccode
= "ne"; break;
11105 case EQ
: case UNEQ
:
11106 ccode
= "eq"; break;
11108 ccode
= "ge"; break;
11109 case GT
: case GTU
: case UNGT
:
11110 ccode
= "gt"; break;
11112 ccode
= "le"; break;
11113 case LT
: case LTU
: case UNLT
:
11114 ccode
= "lt"; break;
11115 case UNORDERED
: ccode
= "un"; break;
11116 case ORDERED
: ccode
= "nu"; break;
11117 case UNGE
: ccode
= "nl"; break;
11118 case UNLE
: ccode
= "ng"; break;
11123 /* Maybe we have a guess as to how likely the branch is.
11124 The old mnemonics don't have a way to specify this information. */
11126 note
= find_reg_note (insn
, REG_BR_PROB
, NULL_RTX
);
11127 if (note
!= NULL_RTX
)
11129 /* PROB is the difference from 50%. */
11130 int prob
= INTVAL (XEXP (note
, 0)) - REG_BR_PROB_BASE
/ 2;
11132 /* Only hint for highly probable/improbable branches on newer
11133 cpus as static prediction overrides processor dynamic
11134 prediction. For older cpus we may as well always hint, but
11135 assume not taken for branches that are very close to 50% as a
11136 mispredicted taken branch is more expensive than a
11137 mispredicted not-taken branch. */
11138 if (rs6000_always_hint
11139 || abs (prob
) > REG_BR_PROB_BASE
/ 100 * 48)
11141 if (abs (prob
) > REG_BR_PROB_BASE
/ 20
11142 && ((prob
> 0) ^ need_longbranch
))
11150 s
+= sprintf (s
, "{b%sr|b%slr%s} ", ccode
, ccode
, pred
);
11152 s
+= sprintf (s
, "{b%s|b%s%s} ", ccode
, ccode
, pred
);
11154 /* We need to escape any '%' characters in the reg_names string.
11155 Assume they'd only be the first character.... */
11156 if (reg_names
[cc_regno
+ CR0_REGNO
][0] == '%')
11158 s
+= sprintf (s
, "%s", reg_names
[cc_regno
+ CR0_REGNO
]);
11162 /* If the branch distance was too far, we may have to use an
11163 unconditional branch to go the distance. */
11164 if (need_longbranch
)
11165 s
+= sprintf (s
, ",$+8\n\tb %s", label
);
11167 s
+= sprintf (s
, ",%s", label
);
11173 /* Return the string to flip the EQ bit on a CR. */
11175 output_e500_flip_eq_bit (rtx dst
, rtx src
)
11177 static char string
[64];
11180 if (GET_CODE (dst
) != REG
|| ! CR_REGNO_P (REGNO (dst
))
11181 || GET_CODE (src
) != REG
|| ! CR_REGNO_P (REGNO (src
)))
11185 a
= 4 * (REGNO (dst
) - CR0_REGNO
) + 2;
11186 b
= 4 * (REGNO (src
) - CR0_REGNO
) + 2;
11188 sprintf (string
, "crnot %d,%d", a
, b
);
11192 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
11193 operands of the last comparison is nonzero/true, FALSE_COND if it
11194 is zero/false. Return 0 if the hardware has no such operation. */
11197 rs6000_emit_cmove (rtx dest
, rtx op
, rtx true_cond
, rtx false_cond
)
11199 enum rtx_code code
= GET_CODE (op
);
11200 rtx op0
= rs6000_compare_op0
;
11201 rtx op1
= rs6000_compare_op1
;
11202 REAL_VALUE_TYPE c1
;
11203 enum machine_mode compare_mode
= GET_MODE (op0
);
11204 enum machine_mode result_mode
= GET_MODE (dest
);
11207 /* These modes should always match. */
11208 if (GET_MODE (op1
) != compare_mode
11209 /* In the isel case however, we can use a compare immediate, so
11210 op1 may be a small constant. */
11211 && (!TARGET_ISEL
|| !short_cint_operand (op1
, VOIDmode
)))
11213 if (GET_MODE (true_cond
) != result_mode
)
11215 if (GET_MODE (false_cond
) != result_mode
)
11218 /* First, work out if the hardware can do this at all, or
11219 if it's too slow.... */
11220 if (! rs6000_compare_fp_p
)
11223 return rs6000_emit_int_cmove (dest
, op
, true_cond
, false_cond
);
11226 else if (TARGET_E500
&& TARGET_HARD_FLOAT
&& !TARGET_FPRS
11227 && GET_MODE_CLASS (compare_mode
) == MODE_FLOAT
)
11230 /* Eliminate half of the comparisons by switching operands, this
11231 makes the remaining code simpler. */
11232 if (code
== UNLT
|| code
== UNGT
|| code
== UNORDERED
|| code
== NE
11233 || code
== LTGT
|| code
== LT
|| code
== UNLE
)
11235 code
= reverse_condition_maybe_unordered (code
);
11237 true_cond
= false_cond
;
11241 /* UNEQ and LTGT take four instructions for a comparison with zero,
11242 it'll probably be faster to use a branch here too. */
11243 if (code
== UNEQ
&& HONOR_NANS (compare_mode
))
11246 if (GET_CODE (op1
) == CONST_DOUBLE
)
11247 REAL_VALUE_FROM_CONST_DOUBLE (c1
, op1
);
11249 /* We're going to try to implement comparisons by performing
11250 a subtract, then comparing against zero. Unfortunately,
11251 Inf - Inf is NaN which is not zero, and so if we don't
11252 know that the operand is finite and the comparison
11253 would treat EQ different to UNORDERED, we can't do it. */
11254 if (HONOR_INFINITIES (compare_mode
)
11255 && code
!= GT
&& code
!= UNGE
11256 && (GET_CODE (op1
) != CONST_DOUBLE
|| real_isinf (&c1
))
11257 /* Constructs of the form (a OP b ? a : b) are safe. */
11258 && ((! rtx_equal_p (op0
, false_cond
) && ! rtx_equal_p (op1
, false_cond
))
11259 || (! rtx_equal_p (op0
, true_cond
)
11260 && ! rtx_equal_p (op1
, true_cond
))))
11262 /* At this point we know we can use fsel. */
11264 /* Reduce the comparison to a comparison against zero. */
11265 temp
= gen_reg_rtx (compare_mode
);
11266 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
11267 gen_rtx_MINUS (compare_mode
, op0
, op1
)));
11269 op1
= CONST0_RTX (compare_mode
);
11271 /* If we don't care about NaNs we can reduce some of the comparisons
11272 down to faster ones. */
11273 if (! HONOR_NANS (compare_mode
))
11279 true_cond
= false_cond
;
11292 /* Now, reduce everything down to a GE. */
11299 temp
= gen_reg_rtx (compare_mode
);
11300 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
11305 temp
= gen_reg_rtx (compare_mode
);
11306 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_ABS (compare_mode
, op0
)));
11311 temp
= gen_reg_rtx (compare_mode
);
11312 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
11313 gen_rtx_NEG (compare_mode
,
11314 gen_rtx_ABS (compare_mode
, op0
))));
11319 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
11320 temp
= gen_reg_rtx (result_mode
);
11321 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
11322 gen_rtx_IF_THEN_ELSE (result_mode
,
11323 gen_rtx_GE (VOIDmode
,
11325 true_cond
, false_cond
)));
11326 false_cond
= true_cond
;
11329 temp
= gen_reg_rtx (compare_mode
);
11330 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
11335 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
11336 temp
= gen_reg_rtx (result_mode
);
11337 emit_insn (gen_rtx_SET (VOIDmode
, temp
,
11338 gen_rtx_IF_THEN_ELSE (result_mode
,
11339 gen_rtx_GE (VOIDmode
,
11341 true_cond
, false_cond
)));
11342 true_cond
= false_cond
;
11345 temp
= gen_reg_rtx (compare_mode
);
11346 emit_insn (gen_rtx_SET (VOIDmode
, temp
, gen_rtx_NEG (compare_mode
, op0
)));
11354 emit_insn (gen_rtx_SET (VOIDmode
, dest
,
11355 gen_rtx_IF_THEN_ELSE (result_mode
,
11356 gen_rtx_GE (VOIDmode
,
11358 true_cond
, false_cond
)));
11362 /* Same as above, but for ints (isel). */
11365 rs6000_emit_int_cmove (rtx dest
, rtx op
, rtx true_cond
, rtx false_cond
)
11367 rtx condition_rtx
, cr
;
11369 /* All isel implementations thus far are 32-bits. */
11370 if (GET_MODE (rs6000_compare_op0
) != SImode
)
11373 /* We still have to do the compare, because isel doesn't do a
11374 compare, it just looks at the CRx bits set by a previous compare
11376 condition_rtx
= rs6000_generate_compare (GET_CODE (op
));
11377 cr
= XEXP (condition_rtx
, 0);
11379 if (GET_MODE (cr
) == CCmode
)
11380 emit_insn (gen_isel_signed (dest
, condition_rtx
,
11381 true_cond
, false_cond
, cr
));
11383 emit_insn (gen_isel_unsigned (dest
, condition_rtx
,
11384 true_cond
, false_cond
, cr
));
11390 output_isel (rtx
*operands
)
11392 enum rtx_code code
;
11394 code
= GET_CODE (operands
[1]);
11395 if (code
== GE
|| code
== GEU
|| code
== LE
|| code
== LEU
|| code
== NE
)
11397 PUT_CODE (operands
[1], reverse_condition (code
));
11398 return "isel %0,%3,%2,%j1";
11401 return "isel %0,%2,%3,%j1";
11405 rs6000_emit_minmax (rtx dest
, enum rtx_code code
, rtx op0
, rtx op1
)
11407 enum machine_mode mode
= GET_MODE (op0
);
11411 if (code
== SMAX
|| code
== SMIN
)
11416 if (code
== SMAX
|| code
== UMAX
)
11417 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
11418 op0
, op1
, mode
, 0);
11420 target
= emit_conditional_move (dest
, c
, op0
, op1
, mode
,
11421 op1
, op0
, mode
, 0);
11422 if (target
== NULL_RTX
)
11424 if (target
!= dest
)
11425 emit_move_insn (dest
, target
);
11428 /* Emit instructions to move SRC to DST. Called by splitters for
11429 multi-register moves. It will emit at most one instruction for
11430 each register that is accessed; that is, it won't emit li/lis pairs
11431 (or equivalent for 64-bit code). One of SRC or DST must be a hard
11435 rs6000_split_multireg_move (rtx dst
, rtx src
)
11437 /* The register number of the first register being moved. */
11439 /* The mode that is to be moved. */
11440 enum machine_mode mode
;
11441 /* The mode that the move is being done in, and its size. */
11442 enum machine_mode reg_mode
;
11444 /* The number of registers that will be moved. */
11447 reg
= REG_P (dst
) ? REGNO (dst
) : REGNO (src
);
11448 mode
= GET_MODE (dst
);
11449 nregs
= HARD_REGNO_NREGS (reg
, mode
);
11450 if (FP_REGNO_P (reg
))
11452 else if (ALTIVEC_REGNO_P (reg
))
11453 reg_mode
= V16QImode
;
11455 reg_mode
= word_mode
;
11456 reg_mode_size
= GET_MODE_SIZE (reg_mode
);
11458 if (reg_mode_size
* nregs
!= GET_MODE_SIZE (mode
))
11461 if (REG_P (src
) && REG_P (dst
) && (REGNO (src
) < REGNO (dst
)))
11463 /* Move register range backwards, if we might have destructive
11466 for (i
= nregs
- 1; i
>= 0; i
--)
11467 emit_insn (gen_rtx_SET (VOIDmode
,
11468 simplify_gen_subreg (reg_mode
, dst
, mode
,
11469 i
* reg_mode_size
),
11470 simplify_gen_subreg (reg_mode
, src
, mode
,
11471 i
* reg_mode_size
)));
11477 bool used_update
= false;
11479 if (MEM_P (src
) && INT_REGNO_P (reg
))
11483 if (GET_CODE (XEXP (src
, 0)) == PRE_INC
11484 || GET_CODE (XEXP (src
, 0)) == PRE_DEC
)
11487 breg
= XEXP (XEXP (src
, 0), 0);
11488 delta_rtx
= GET_CODE (XEXP (src
, 0)) == PRE_INC
11489 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src
)))
11490 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src
)));
11491 emit_insn (TARGET_32BIT
11492 ? gen_addsi3 (breg
, breg
, delta_rtx
)
11493 : gen_adddi3 (breg
, breg
, delta_rtx
));
11494 src
= gen_rtx_MEM (mode
, breg
);
11496 else if (! offsettable_memref_p (src
))
11498 rtx newsrc
, basereg
;
11499 basereg
= gen_rtx_REG (Pmode
, reg
);
11500 emit_insn (gen_rtx_SET (VOIDmode
, basereg
, XEXP (src
, 0)));
11501 newsrc
= gen_rtx_MEM (GET_MODE (src
), basereg
);
11502 MEM_COPY_ATTRIBUTES (newsrc
, src
);
11506 breg
= XEXP (src
, 0);
11507 if (GET_CODE (breg
) == PLUS
|| GET_CODE (breg
) == LO_SUM
)
11508 breg
= XEXP (breg
, 0);
11510 /* If the base register we are using to address memory is
11511 also a destination reg, then change that register last. */
11513 && REGNO (breg
) >= REGNO (dst
)
11514 && REGNO (breg
) < REGNO (dst
) + nregs
)
11515 j
= REGNO (breg
) - REGNO (dst
);
11518 if (GET_CODE (dst
) == MEM
&& INT_REGNO_P (reg
))
11522 if (GET_CODE (XEXP (dst
, 0)) == PRE_INC
11523 || GET_CODE (XEXP (dst
, 0)) == PRE_DEC
)
11526 breg
= XEXP (XEXP (dst
, 0), 0);
11527 delta_rtx
= GET_CODE (XEXP (dst
, 0)) == PRE_INC
11528 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst
)))
11529 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst
)));
11531 /* We have to update the breg before doing the store.
11532 Use store with update, if available. */
11536 rtx nsrc
= simplify_gen_subreg (reg_mode
, src
, mode
, 0);
11537 emit_insn (TARGET_32BIT
11538 ? (TARGET_POWERPC64
11539 ? gen_movdi_si_update (breg
, breg
, delta_rtx
, nsrc
)
11540 : gen_movsi_update (breg
, breg
, delta_rtx
, nsrc
))
11541 : gen_movdi_di_update (breg
, breg
, delta_rtx
, nsrc
));
11542 used_update
= true;
11545 emit_insn (TARGET_32BIT
11546 ? gen_addsi3 (breg
, breg
, delta_rtx
)
11547 : gen_adddi3 (breg
, breg
, delta_rtx
));
11548 dst
= gen_rtx_MEM (mode
, breg
);
11550 else if (! offsettable_memref_p (dst
))
11554 for (i
= 0; i
< nregs
; i
++)
11556 /* Calculate index to next subword. */
11561 /* If compiler already emitted move of first word by
11562 store with update, no need to do anything. */
11563 if (j
== 0 && used_update
)
11566 emit_insn (gen_rtx_SET (VOIDmode
,
11567 simplify_gen_subreg (reg_mode
, dst
, mode
,
11568 j
* reg_mode_size
),
11569 simplify_gen_subreg (reg_mode
, src
, mode
,
11570 j
* reg_mode_size
)));
11576 /* This page contains routines that are used to determine what the
11577 function prologue and epilogue code will do and write them out. */
11579 /* Return the first fixed-point register that is required to be
11580 saved. 32 if none. */
11583 first_reg_to_save (void)
11587 /* Find lowest numbered live register. */
11588 for (first_reg
= 13; first_reg
<= 31; first_reg
++)
11589 if (regs_ever_live
[first_reg
]
11590 && (! call_used_regs
[first_reg
]
11591 || (first_reg
== RS6000_PIC_OFFSET_TABLE_REGNUM
11592 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
11593 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
)
11594 || (TARGET_TOC
&& TARGET_MINIMAL_TOC
)))))
11599 && current_function_uses_pic_offset_table
11600 && first_reg
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
11601 return RS6000_PIC_OFFSET_TABLE_REGNUM
;
11607 /* Similar, for FP regs. */
11610 first_fp_reg_to_save (void)
11614 /* Find lowest numbered live register. */
11615 for (first_reg
= 14 + 32; first_reg
<= 63; first_reg
++)
11616 if (regs_ever_live
[first_reg
])
11622 /* Similar, for AltiVec regs. */
11625 first_altivec_reg_to_save (void)
11629 /* Stack frame remains as is unless we are in AltiVec ABI. */
11630 if (! TARGET_ALTIVEC_ABI
)
11631 return LAST_ALTIVEC_REGNO
+ 1;
11633 /* Find lowest numbered live register. */
11634 for (i
= FIRST_ALTIVEC_REGNO
+ 20; i
<= LAST_ALTIVEC_REGNO
; ++i
)
11635 if (regs_ever_live
[i
])
11641 /* Return a 32-bit mask of the AltiVec registers we need to set in
11642 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
11643 the 32-bit word is 0. */
11645 static unsigned int
11646 compute_vrsave_mask (void)
11648 unsigned int i
, mask
= 0;
11650 /* First, find out if we use _any_ altivec registers. */
11651 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
11652 if (regs_ever_live
[i
])
11653 mask
|= ALTIVEC_REG_BIT (i
);
11658 /* Next, remove the argument registers from the set. These must
11659 be in the VRSAVE mask set by the caller, so we don't need to add
11660 them in again. More importantly, the mask we compute here is
11661 used to generate CLOBBERs in the set_vrsave insn, and we do not
11662 wish the argument registers to die. */
11663 for (i
= cfun
->args_info
.vregno
- 1; i
>= ALTIVEC_ARG_MIN_REG
; --i
)
11664 mask
&= ~ALTIVEC_REG_BIT (i
);
11666 /* Similarly, remove the return value from the set. */
11669 diddle_return_value (is_altivec_return_reg
, &yes
);
11671 mask
&= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN
);
11677 /* For a very restricted set of circumstances, we can cut down the
11678 size of prologs/epilogs by calling our own save/restore-the-world
11682 compute_save_world_info(rs6000_stack_t
*info_ptr
)
11684 info_ptr
->world_save_p
=
11685 (DEFAULT_ABI
== ABI_DARWIN
)
11686 && ! (current_function_calls_setjmp
&& flag_exceptions
)
11687 && info_ptr
->first_fp_reg_save
== FIRST_SAVED_FP_REGNO
11688 && info_ptr
->first_gp_reg_save
== FIRST_SAVED_GP_REGNO
11689 && info_ptr
->first_altivec_reg_save
== FIRST_SAVED_ALTIVEC_REGNO
11690 && info_ptr
->cr_save_p
;
11692 /* This will not work in conjunction with sibcalls. Make sure there
11693 are none. (This check is expensive, but seldom executed.) */
11694 if ( info_ptr
->world_save_p
)
11697 for ( insn
= get_last_insn_anywhere (); insn
; insn
= PREV_INSN (insn
))
11698 if ( GET_CODE (insn
) == CALL_INSN
11699 && SIBLING_CALL_P (insn
))
11701 info_ptr
->world_save_p
= 0;
11706 if (info_ptr
->world_save_p
)
11708 /* Even if we're not touching VRsave, make sure there's room on the
11709 stack for it, if it looks like we're calling SAVE_WORLD, which
11710 will attempt to save it. */
11711 info_ptr
->vrsave_size
= 4;
11713 /* "Save" the VRsave register too if we're saving the world. */
11714 if (info_ptr
->vrsave_mask
== 0)
11715 info_ptr
->vrsave_mask
= compute_vrsave_mask ();
11717 /* Because the Darwin register save/restore routines only handle
11718 F14 .. F31 and V20 .. V31 as per the ABI, perform a consistency
11719 check and abort if there's something worng. */
11720 if (info_ptr
->first_fp_reg_save
< FIRST_SAVED_FP_REGNO
11721 || info_ptr
->first_altivec_reg_save
< FIRST_SAVED_ALTIVEC_REGNO
)
11729 is_altivec_return_reg (rtx reg
, void *xyes
)
11731 bool *yes
= (bool *) xyes
;
11732 if (REGNO (reg
) == ALTIVEC_ARG_RETURN
)
11737 /* Calculate the stack information for the current function. This is
11738 complicated by having two separate calling sequences, the AIX calling
11739 sequence and the V.4 calling sequence.
11741 AIX (and Darwin/Mac OS X) stack frames look like:
11743 SP----> +---------------------------------------+
11744 | back chain to caller | 0 0
11745 +---------------------------------------+
11746 | saved CR | 4 8 (8-11)
11747 +---------------------------------------+
11749 +---------------------------------------+
11750 | reserved for compilers | 12 24
11751 +---------------------------------------+
11752 | reserved for binders | 16 32
11753 +---------------------------------------+
11754 | saved TOC pointer | 20 40
11755 +---------------------------------------+
11756 | Parameter save area (P) | 24 48
11757 +---------------------------------------+
11758 | Alloca space (A) | 24+P etc.
11759 +---------------------------------------+
11760 | Local variable space (L) | 24+P+A
11761 +---------------------------------------+
11762 | Float/int conversion temporary (X) | 24+P+A+L
11763 +---------------------------------------+
11764 | Save area for AltiVec registers (W) | 24+P+A+L+X
11765 +---------------------------------------+
11766 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
11767 +---------------------------------------+
11768 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
11769 +---------------------------------------+
11770 		| Save area for GP registers (G)	| 24+P+A+L+X+W+Y+Z
		+---------------------------------------+
11772 		| Save area for FP registers (F)	| 24+P+A+L+X+W+Y+Z+G
11773 +---------------------------------------+
11774 old SP->| back chain to caller's caller |
11775 +---------------------------------------+
11777 The required alignment for AIX configurations is two words (i.e., 8
11781 V.4 stack frames look like:
11783 SP----> +---------------------------------------+
11784 | back chain to caller | 0
11785 +---------------------------------------+
11786 | caller's saved LR | 4
11787 +---------------------------------------+
11788 | Parameter save area (P) | 8
11789 +---------------------------------------+
11790 | Alloca space (A) | 8+P
11791 +---------------------------------------+
11792 | Varargs save area (V) | 8+P+A
11793 +---------------------------------------+
11794 | Local variable space (L) | 8+P+A+V
11795 +---------------------------------------+
11796 | Float/int conversion temporary (X) | 8+P+A+V+L
11797 +---------------------------------------+
11798 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
11799 +---------------------------------------+
11800 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
11801 +---------------------------------------+
11802 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
11803 +---------------------------------------+
11804 | SPE: area for 64-bit GP registers |
11805 +---------------------------------------+
11806 | SPE alignment padding |
11807 +---------------------------------------+
11808 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
11809 +---------------------------------------+
11810 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
11811 +---------------------------------------+
11812 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
11813 +---------------------------------------+
11814 old SP->| back chain to caller's caller |
11815 +---------------------------------------+
11817 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
11818 given. (But note below and in sysv4.h that we require only 8 and
11819 may round up the size of our stack frame anyways. The historical
11820 reason is early versions of powerpc-linux which didn't properly
11821 align the stack at program startup. A happy side-effect is that
11822 -mno-eabi libraries can be used with -meabi programs.)
11824 The EABI configuration defaults to the V.4 layout. However,
11825 the stack alignment requirements may differ. If -mno-eabi is not
11826 given, the required stack alignment is 8 bytes; if -mno-eabi is
11827 given, the required alignment is 16 bytes. (But see V.4 comment
11830 #ifndef ABI_STACK_BOUNDARY
11831 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
11834 static rs6000_stack_t
*
11835 rs6000_stack_info (void)
11837 static rs6000_stack_t info
, zero_info
;
11838 rs6000_stack_t
*info_ptr
= &info
;
11839 int reg_size
= TARGET_32BIT
? 4 : 8;
11842 HOST_WIDE_INT non_fixed_size
;
11844 /* Zero all fields portably. */
11849 /* Cache value so we don't rescan instruction chain over and over. */
11850 if (cfun
->machine
->insn_chain_scanned_p
== 0)
11852 cfun
->machine
->insn_chain_scanned_p
= 1;
11853 info_ptr
->spe_64bit_regs_used
= (int) spe_func_has_64bit_regs_p ();
11857 /* Select which calling sequence. */
11858 info_ptr
->abi
= DEFAULT_ABI
;
11860 /* Calculate which registers need to be saved & save area size. */
11861 info_ptr
->first_gp_reg_save
= first_reg_to_save ();
11862 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
11863 even if it currently looks like we won't. */
11864 if (((TARGET_TOC
&& TARGET_MINIMAL_TOC
)
11865 || (flag_pic
== 1 && DEFAULT_ABI
== ABI_V4
)
11866 || (flag_pic
&& DEFAULT_ABI
== ABI_DARWIN
))
11867 && info_ptr
->first_gp_reg_save
> RS6000_PIC_OFFSET_TABLE_REGNUM
)
11868 info_ptr
->gp_size
= reg_size
* (32 - RS6000_PIC_OFFSET_TABLE_REGNUM
);
11870 info_ptr
->gp_size
= reg_size
* (32 - info_ptr
->first_gp_reg_save
);
11872 /* For the SPE, we have an additional upper 32-bits on each GPR.
11873 Ideally we should save the entire 64-bits only when the upper
11874 half is used in SIMD instructions. Since we only record
11875 registers live (not the size they are used in), this proves
11876 difficult because we'd have to traverse the instruction chain at
11877 the right time, taking reload into account. This is a real pain,
11878 so we opt to save the GPRs in 64-bits always if but one register
11879 gets used in 64-bits. Otherwise, all the registers in the frame
11880 get saved in 32-bits.
11882 So... since when we save all GPRs (except the SP) in 64-bits, the
11883 traditional GP save area will be empty. */
11884 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
11885 info_ptr
->gp_size
= 0;
11887 info_ptr
->first_fp_reg_save
= first_fp_reg_to_save ();
11888 info_ptr
->fp_size
= 8 * (64 - info_ptr
->first_fp_reg_save
);
11890 info_ptr
->first_altivec_reg_save
= first_altivec_reg_to_save ();
11891 info_ptr
->altivec_size
= 16 * (LAST_ALTIVEC_REGNO
+ 1
11892 - info_ptr
->first_altivec_reg_save
);
11894 /* Does this function call anything? */
11895 info_ptr
->calls_p
= (! current_function_is_leaf
11896 || cfun
->machine
->ra_needs_full_frame
);
11898 /* Determine if we need to save the link register. */
11899 if (rs6000_ra_ever_killed ()
11900 || (DEFAULT_ABI
== ABI_AIX
11901 && current_function_profile
11902 && !TARGET_PROFILE_KERNEL
)
11903 #ifdef TARGET_RELOCATABLE
11904 || (TARGET_RELOCATABLE
&& (get_pool_size () != 0))
11906 || (info_ptr
->first_fp_reg_save
!= 64
11907 && !FP_SAVE_INLINE (info_ptr
->first_fp_reg_save
))
11908 || info_ptr
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
11909 || (DEFAULT_ABI
== ABI_V4
&& current_function_calls_alloca
)
11910 || (DEFAULT_ABI
== ABI_DARWIN
11912 && current_function_uses_pic_offset_table
)
11913 || info_ptr
->calls_p
)
11915 info_ptr
->lr_save_p
= 1;
11916 regs_ever_live
[LINK_REGISTER_REGNUM
] = 1;
11919 /* Determine if we need to save the condition code registers. */
11920 if (regs_ever_live
[CR2_REGNO
]
11921 || regs_ever_live
[CR3_REGNO
]
11922 || regs_ever_live
[CR4_REGNO
])
11924 info_ptr
->cr_save_p
= 1;
11925 if (DEFAULT_ABI
== ABI_V4
)
11926 info_ptr
->cr_size
= reg_size
;
11929 /* If the current function calls __builtin_eh_return, then we need
11930 to allocate stack space for registers that will hold data for
11931 the exception handler. */
11932 if (current_function_calls_eh_return
)
11935 for (i
= 0; EH_RETURN_DATA_REGNO (i
) != INVALID_REGNUM
; ++i
)
11938 /* SPE saves EH registers in 64-bits. */
11939 ehrd_size
= i
* (TARGET_SPE_ABI
11940 && info_ptr
->spe_64bit_regs_used
!= 0
11941 ? UNITS_PER_SPE_WORD
: UNITS_PER_WORD
);
11946 /* Determine various sizes. */
11947 info_ptr
->reg_size
= reg_size
;
11948 info_ptr
->fixed_size
= RS6000_SAVE_AREA
;
11949 info_ptr
->varargs_size
= RS6000_VARARGS_AREA
;
11950 info_ptr
->vars_size
= RS6000_ALIGN (get_frame_size (), 8);
11951 info_ptr
->parm_size
= RS6000_ALIGN (current_function_outgoing_args_size
,
11952 TARGET_ALTIVEC
? 16 : 8);
11954 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
11955 info_ptr
->spe_gp_size
= 8 * (32 - info_ptr
->first_gp_reg_save
);
11957 info_ptr
->spe_gp_size
= 0;
11959 if (TARGET_ALTIVEC_ABI
)
11960 info_ptr
->vrsave_mask
= compute_vrsave_mask ();
11962 info_ptr
->vrsave_mask
= 0;
11964 if (TARGET_ALTIVEC_VRSAVE
&& info_ptr
->vrsave_mask
)
11965 info_ptr
->vrsave_size
= 4;
11967 info_ptr
->vrsave_size
= 0;
11969 compute_save_world_info (info_ptr
);
11971 /* Calculate the offsets. */
11972 switch (DEFAULT_ABI
)
11980 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
11981 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
11983 if (TARGET_ALTIVEC_ABI
)
11985 info_ptr
->vrsave_save_offset
11986 = info_ptr
->gp_save_offset
- info_ptr
->vrsave_size
;
11988 /* Align stack so vector save area is on a quadword boundary. */
11989 if (info_ptr
->altivec_size
!= 0)
11990 info_ptr
->altivec_padding_size
11991 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
11993 info_ptr
->altivec_padding_size
= 0;
11995 info_ptr
->altivec_save_offset
11996 = info_ptr
->vrsave_save_offset
11997 - info_ptr
->altivec_padding_size
11998 - info_ptr
->altivec_size
;
12000 /* Adjust for AltiVec case. */
12001 info_ptr
->ehrd_offset
= info_ptr
->altivec_save_offset
- ehrd_size
;
12004 info_ptr
->ehrd_offset
= info_ptr
->gp_save_offset
- ehrd_size
;
12005 info_ptr
->cr_save_offset
= reg_size
; /* first word when 64-bit. */
12006 info_ptr
->lr_save_offset
= 2*reg_size
;
12010 info_ptr
->fp_save_offset
= - info_ptr
->fp_size
;
12011 info_ptr
->gp_save_offset
= info_ptr
->fp_save_offset
- info_ptr
->gp_size
;
12012 info_ptr
->cr_save_offset
= info_ptr
->gp_save_offset
- info_ptr
->cr_size
;
12014 if (TARGET_SPE_ABI
&& info_ptr
->spe_64bit_regs_used
!= 0)
12016 /* Align stack so SPE GPR save area is aligned on a
12017 double-word boundary. */
12018 if (info_ptr
->spe_gp_size
!= 0)
12019 info_ptr
->spe_padding_size
12020 = 8 - (-info_ptr
->cr_save_offset
% 8);
12022 info_ptr
->spe_padding_size
= 0;
12024 info_ptr
->spe_gp_save_offset
12025 = info_ptr
->cr_save_offset
12026 - info_ptr
->spe_padding_size
12027 - info_ptr
->spe_gp_size
;
12029 /* Adjust for SPE case. */
12030 info_ptr
->toc_save_offset
12031 = info_ptr
->spe_gp_save_offset
- info_ptr
->toc_size
;
12033 else if (TARGET_ALTIVEC_ABI
)
12035 info_ptr
->vrsave_save_offset
12036 = info_ptr
->cr_save_offset
- info_ptr
->vrsave_size
;
12038 /* Align stack so vector save area is on a quadword boundary. */
12039 if (info_ptr
->altivec_size
!= 0)
12040 info_ptr
->altivec_padding_size
12041 = 16 - (-info_ptr
->vrsave_save_offset
% 16);
12043 info_ptr
->altivec_padding_size
= 0;
12045 info_ptr
->altivec_save_offset
12046 = info_ptr
->vrsave_save_offset
12047 - info_ptr
->altivec_padding_size
12048 - info_ptr
->altivec_size
;
12050 /* Adjust for AltiVec case. */
12051 info_ptr
->toc_save_offset
12052 = info_ptr
->altivec_save_offset
- info_ptr
->toc_size
;
12055 info_ptr
->toc_save_offset
= info_ptr
->cr_save_offset
- info_ptr
->toc_size
;
12056 info_ptr
->ehrd_offset
= info_ptr
->toc_save_offset
- ehrd_size
;
12057 info_ptr
->lr_save_offset
= reg_size
;
12061 save_align
= (TARGET_ALTIVEC_ABI
|| DEFAULT_ABI
== ABI_DARWIN
) ? 16 : 8;
12062 info_ptr
->save_size
= RS6000_ALIGN (info_ptr
->fp_size
12063 + info_ptr
->gp_size
12064 + info_ptr
->altivec_size
12065 + info_ptr
->altivec_padding_size
12066 + info_ptr
->spe_gp_size
12067 + info_ptr
->spe_padding_size
12069 + info_ptr
->cr_size
12070 + info_ptr
->lr_size
12071 + info_ptr
->vrsave_size
12072 + info_ptr
->toc_size
,
12075 non_fixed_size
= (info_ptr
->vars_size
12076 + info_ptr
->parm_size
12077 + info_ptr
->save_size
12078 + info_ptr
->varargs_size
);
12080 info_ptr
->total_size
= RS6000_ALIGN (non_fixed_size
+ info_ptr
->fixed_size
,
12081 ABI_STACK_BOUNDARY
/ BITS_PER_UNIT
);
12083 /* Determine if we need to allocate any stack frame:
12085 For AIX we need to push the stack if a frame pointer is needed
12086 (because the stack might be dynamically adjusted), if we are
12087 debugging, if we make calls, or if the sum of fp_save, gp_save,
12088 and local variables are more than the space needed to save all
12089 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
12090 + 18*8 = 288 (GPR13 reserved).
12092 For V.4 we don't have the stack cushion that AIX uses, but assume
12093 that the debugger can handle stackless frames. */
12095 if (info_ptr
->calls_p
)
12096 info_ptr
->push_p
= 1;
12098 else if (DEFAULT_ABI
== ABI_V4
)
12099 info_ptr
->push_p
= non_fixed_size
!= 0;
12101 else if (frame_pointer_needed
)
12102 info_ptr
->push_p
= 1;
12104 else if (TARGET_XCOFF
&& write_symbols
!= NO_DEBUG
)
12105 info_ptr
->push_p
= 1;
12108 info_ptr
->push_p
= non_fixed_size
> (TARGET_32BIT
? 220 : 288);
12110 /* Zero offsets if we're not saving those registers. */
12111 if (info_ptr
->fp_size
== 0)
12112 info_ptr
->fp_save_offset
= 0;
12114 if (info_ptr
->gp_size
== 0)
12115 info_ptr
->gp_save_offset
= 0;
12117 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->altivec_size
== 0)
12118 info_ptr
->altivec_save_offset
= 0;
12120 if (! TARGET_ALTIVEC_ABI
|| info_ptr
->vrsave_mask
== 0)
12121 info_ptr
->vrsave_save_offset
= 0;
12123 if (! TARGET_SPE_ABI
12124 || info_ptr
->spe_64bit_regs_used
== 0
12125 || info_ptr
->spe_gp_size
== 0)
12126 info_ptr
->spe_gp_save_offset
= 0;
12128 if (! info_ptr
->lr_save_p
)
12129 info_ptr
->lr_save_offset
= 0;
12131 if (! info_ptr
->cr_save_p
)
12132 info_ptr
->cr_save_offset
= 0;
12134 if (! info_ptr
->toc_save_p
)
12135 info_ptr
->toc_save_offset
= 0;
12140 /* Return true if the current function uses any GPRs in 64-bit SIMD
12144 spe_func_has_64bit_regs_p (void)
12148 /* Functions that save and restore all the call-saved registers will
12149 need to save/restore the registers in 64-bits. */
12150 if (current_function_calls_eh_return
12151 || current_function_calls_setjmp
12152 || current_function_has_nonlocal_goto
)
12155 insns
= get_insns ();
12157 for (insn
= NEXT_INSN (insns
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
12163 i
= PATTERN (insn
);
12164 if (GET_CODE (i
) == SET
12165 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i
))))
12174 debug_stack_info (rs6000_stack_t
*info
)
12176 const char *abi_string
;
12179 info
= rs6000_stack_info ();
12181 fprintf (stderr
, "\nStack information for function %s:\n",
12182 ((current_function_decl
&& DECL_NAME (current_function_decl
))
12183 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl
))
12188 default: abi_string
= "Unknown"; break;
12189 case ABI_NONE
: abi_string
= "NONE"; break;
12190 case ABI_AIX
: abi_string
= "AIX"; break;
12191 case ABI_DARWIN
: abi_string
= "Darwin"; break;
12192 case ABI_V4
: abi_string
= "V.4"; break;
12195 fprintf (stderr
, "\tABI = %5s\n", abi_string
);
12197 if (TARGET_ALTIVEC_ABI
)
12198 fprintf (stderr
, "\tALTIVEC ABI extensions enabled.\n");
12200 if (TARGET_SPE_ABI
)
12201 fprintf (stderr
, "\tSPE ABI extensions enabled.\n");
12203 if (info
->first_gp_reg_save
!= 32)
12204 fprintf (stderr
, "\tfirst_gp_reg_save = %5d\n", info
->first_gp_reg_save
);
12206 if (info
->first_fp_reg_save
!= 64)
12207 fprintf (stderr
, "\tfirst_fp_reg_save = %5d\n", info
->first_fp_reg_save
);
12209 if (info
->first_altivec_reg_save
<= LAST_ALTIVEC_REGNO
)
12210 fprintf (stderr
, "\tfirst_altivec_reg_save = %5d\n",
12211 info
->first_altivec_reg_save
);
12213 if (info
->lr_save_p
)
12214 fprintf (stderr
, "\tlr_save_p = %5d\n", info
->lr_save_p
);
12216 if (info
->cr_save_p
)
12217 fprintf (stderr
, "\tcr_save_p = %5d\n", info
->cr_save_p
);
12219 if (info
->toc_save_p
)
12220 fprintf (stderr
, "\ttoc_save_p = %5d\n", info
->toc_save_p
);
12222 if (info
->vrsave_mask
)
12223 fprintf (stderr
, "\tvrsave_mask = 0x%x\n", info
->vrsave_mask
);
12226 fprintf (stderr
, "\tpush_p = %5d\n", info
->push_p
);
12229 fprintf (stderr
, "\tcalls_p = %5d\n", info
->calls_p
);
12231 if (info
->gp_save_offset
)
12232 fprintf (stderr
, "\tgp_save_offset = %5d\n", info
->gp_save_offset
);
12234 if (info
->fp_save_offset
)
12235 fprintf (stderr
, "\tfp_save_offset = %5d\n", info
->fp_save_offset
);
12237 if (info
->altivec_save_offset
)
12238 fprintf (stderr
, "\taltivec_save_offset = %5d\n",
12239 info
->altivec_save_offset
);
12241 if (info
->spe_gp_save_offset
)
12242 fprintf (stderr
, "\tspe_gp_save_offset = %5d\n",
12243 info
->spe_gp_save_offset
);
12245 if (info
->vrsave_save_offset
)
12246 fprintf (stderr
, "\tvrsave_save_offset = %5d\n",
12247 info
->vrsave_save_offset
);
12249 if (info
->lr_save_offset
)
12250 fprintf (stderr
, "\tlr_save_offset = %5d\n", info
->lr_save_offset
);
12252 if (info
->cr_save_offset
)
12253 fprintf (stderr
, "\tcr_save_offset = %5d\n", info
->cr_save_offset
);
12255 if (info
->toc_save_offset
)
12256 fprintf (stderr
, "\ttoc_save_offset = %5d\n", info
->toc_save_offset
);
12258 if (info
->varargs_save_offset
)
12259 fprintf (stderr
, "\tvarargs_save_offset = %5d\n", info
->varargs_save_offset
);
12261 if (info
->total_size
)
12262 fprintf (stderr
, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC
"\n",
12265 if (info
->varargs_size
)
12266 fprintf (stderr
, "\tvarargs_size = %5d\n", info
->varargs_size
);
12268 if (info
->vars_size
)
12269 fprintf (stderr
, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC
"\n",
12272 if (info
->parm_size
)
12273 fprintf (stderr
, "\tparm_size = %5d\n", info
->parm_size
);
12275 if (info
->fixed_size
)
12276 fprintf (stderr
, "\tfixed_size = %5d\n", info
->fixed_size
);
12279 fprintf (stderr
, "\tgp_size = %5d\n", info
->gp_size
);
12281 if (info
->spe_gp_size
)
12282 fprintf (stderr
, "\tspe_gp_size = %5d\n", info
->spe_gp_size
);
12285 fprintf (stderr
, "\tfp_size = %5d\n", info
->fp_size
);
12287 if (info
->altivec_size
)
12288 fprintf (stderr
, "\taltivec_size = %5d\n", info
->altivec_size
);
12290 if (info
->vrsave_size
)
12291 fprintf (stderr
, "\tvrsave_size = %5d\n", info
->vrsave_size
);
12293 if (info
->altivec_padding_size
)
12294 fprintf (stderr
, "\taltivec_padding_size= %5d\n",
12295 info
->altivec_padding_size
);
12297 if (info
->spe_padding_size
)
12298 fprintf (stderr
, "\tspe_padding_size = %5d\n",
12299 info
->spe_padding_size
);
12302 fprintf (stderr
, "\tlr_size = %5d\n", info
->lr_size
);
12305 fprintf (stderr
, "\tcr_size = %5d\n", info
->cr_size
);
12307 if (info
->toc_size
)
12308 fprintf (stderr
, "\ttoc_size = %5d\n", info
->toc_size
);
12310 if (info
->save_size
)
12311 fprintf (stderr
, "\tsave_size = %5d\n", info
->save_size
);
12313 if (info
->reg_size
!= 4)
12314 fprintf (stderr
, "\treg_size = %5d\n", info
->reg_size
);
12316 fprintf (stderr
, "\n");
12320 rs6000_return_addr (int count
, rtx frame
)
12322 /* Currently we don't optimize very well between prolog and body
12323 code and for PIC code the code can be actually quite bad, so
12324 don't try to be too clever here. */
12325 if (count
!= 0 || (DEFAULT_ABI
!= ABI_AIX
&& flag_pic
))
12327 cfun
->machine
->ra_needs_full_frame
= 1;
12334 plus_constant (copy_to_reg
12335 (gen_rtx_MEM (Pmode
,
12336 memory_address (Pmode
, frame
))),
12337 RETURN_ADDRESS_OFFSET
)));
12340 cfun
->machine
->ra_need_lr
= 1;
12341 return get_hard_reg_initial_val (Pmode
, LINK_REGISTER_REGNUM
);
12344 /* Say whether a function is a candidate for sibcall handling or not.
12345 We do not allow indirect calls to be optimized into sibling calls.
12346 Also, we can't do it if there are any vector parameters; there's
12347 nowhere to put the VRsave code so it works; note that functions with
12348 vector parameters are required to have a prototype, so the argument
12349 type info must be available here. (The tail recursion case can work
12350 with vector parameters, but there's no way to distinguish here.) */
12352 rs6000_function_ok_for_sibcall (tree decl
, tree exp ATTRIBUTE_UNUSED
)
12357 if (TARGET_ALTIVEC_VRSAVE
)
12359 for (type
= TYPE_ARG_TYPES (TREE_TYPE (decl
));
12360 type
; type
= TREE_CHAIN (type
))
12362 if (TREE_CODE (TREE_VALUE (type
)) == VECTOR_TYPE
)
12366 if (DEFAULT_ABI
== ABI_DARWIN
12367 || (*targetm
.binds_local_p
) (decl
))
12369 tree attr_list
= TYPE_ATTRIBUTES (TREE_TYPE (decl
));
12371 if (!lookup_attribute ("longcall", attr_list
)
12372 || lookup_attribute ("shortcall", attr_list
))
12380 rs6000_ra_ever_killed (void)
12386 if (current_function_is_thunk
)
12389 /* regs_ever_live has LR marked as used if any sibcalls are present,
12390 but this should not force saving and restoring in the
12391 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
12392 clobbers LR, so that is inappropriate. */
12394 /* Also, the prologue can generate a store into LR that
12395 doesn't really count, like this:
12398 bcl to set PIC register
12402 When we're called from the epilogue, we need to avoid counting
12403 this as a store. */
12405 push_topmost_sequence ();
12406 top
= get_insns ();
12407 pop_topmost_sequence ();
12408 reg
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
12410 for (insn
= NEXT_INSN (top
); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
12414 if (FIND_REG_INC_NOTE (insn
, reg
))
12416 else if (GET_CODE (insn
) == CALL_INSN
12417 && !SIBLING_CALL_P (insn
))
12419 else if (set_of (reg
, insn
) != NULL_RTX
12420 && !prologue_epilogue_contains (insn
))
12427 /* Add a REG_MAYBE_DEAD note to the insn. */
12429 rs6000_maybe_dead (rtx insn
)
12431 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
,
12436 /* Emit instructions needed to load the TOC register.
12437 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
12438 a constant pool; or for SVR4 -fpic. */
12441 rs6000_emit_load_toc_table (int fromprolog
)
12444 dest
= gen_rtx_REG (Pmode
, RS6000_PIC_OFFSET_TABLE_REGNUM
);
12446 if (TARGET_ELF
&& DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1)
12448 rtx temp
= (fromprolog
12449 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
12450 : gen_reg_rtx (Pmode
));
12451 insn
= emit_insn (gen_load_toc_v4_pic_si (temp
));
12453 rs6000_maybe_dead (insn
);
12454 insn
= emit_move_insn (dest
, temp
);
12456 rs6000_maybe_dead (insn
);
12458 else if (TARGET_ELF
&& DEFAULT_ABI
!= ABI_AIX
&& flag_pic
== 2)
12461 rtx tempLR
= (fromprolog
12462 ? gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
)
12463 : gen_reg_rtx (Pmode
));
12464 rtx temp0
= (fromprolog
12465 ? gen_rtx_REG (Pmode
, 0)
12466 : gen_reg_rtx (Pmode
));
12469 /* possibly create the toc section */
12470 if (! toc_initialized
)
12473 function_section (current_function_decl
);
12480 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
12481 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
12483 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCL", rs6000_pic_labelno
);
12484 symL
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
12486 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR
,
12488 rs6000_maybe_dead (emit_move_insn (dest
, tempLR
));
12489 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0
, dest
,
12496 static int reload_toc_labelno
= 0;
12498 tocsym
= gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
);
12500 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCG", reload_toc_labelno
++);
12501 symF
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
12503 emit_insn (gen_load_toc_v4_PIC_1b (tempLR
, symF
, tocsym
));
12504 emit_move_insn (dest
, tempLR
);
12505 emit_move_insn (temp0
, gen_rtx_MEM (Pmode
, dest
));
12507 insn
= emit_insn (gen_addsi3 (dest
, temp0
, dest
));
12509 rs6000_maybe_dead (insn
);
12511 else if (TARGET_ELF
&& !TARGET_AIX
&& flag_pic
== 0 && TARGET_MINIMAL_TOC
)
12513 /* This is for AIX code running in non-PIC ELF32. */
12516 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCTOC", 1);
12517 realsym
= gen_rtx_SYMBOL_REF (Pmode
, ggc_strdup (buf
));
12519 insn
= emit_insn (gen_elf_high (dest
, realsym
));
12521 rs6000_maybe_dead (insn
);
12522 insn
= emit_insn (gen_elf_low (dest
, dest
, realsym
));
12524 rs6000_maybe_dead (insn
);
12526 else if (DEFAULT_ABI
== ABI_AIX
)
12529 insn
= emit_insn (gen_load_toc_aix_si (dest
));
12531 insn
= emit_insn (gen_load_toc_aix_di (dest
));
12533 rs6000_maybe_dead (insn
);
12539 /* Emit instructions to restore the link register after determining where
12540 its value has been stored. */
12543 rs6000_emit_eh_reg_restore (rtx source
, rtx scratch
)
12545 rs6000_stack_t
*info
= rs6000_stack_info ();
12548 operands
[0] = source
;
12549 operands
[1] = scratch
;
12551 if (info
->lr_save_p
)
12553 rtx frame_rtx
= stack_pointer_rtx
;
12554 HOST_WIDE_INT sp_offset
= 0;
12557 if (frame_pointer_needed
12558 || current_function_calls_alloca
12559 || info
->total_size
> 32767)
12561 emit_move_insn (operands
[1], gen_rtx_MEM (Pmode
, frame_rtx
));
12562 frame_rtx
= operands
[1];
12564 else if (info
->push_p
)
12565 sp_offset
= info
->total_size
;
12567 tmp
= plus_constant (frame_rtx
, info
->lr_save_offset
+ sp_offset
);
12568 tmp
= gen_rtx_MEM (Pmode
, tmp
);
12569 emit_move_insn (tmp
, operands
[0]);
12572 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
), operands
[0]);
12575 static GTY(()) int set
= -1;
12578 get_TOC_alias_set (void)
12581 set
= new_alias_set ();
12585 /* This returns nonzero if the current function uses the TOC. This is
12586 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
12587 is generated by the ABI_V4 load_toc_* patterns. */
12594 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
12597 rtx pat
= PATTERN (insn
);
12600 if (GET_CODE (pat
) == PARALLEL
)
12601 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
12603 rtx sub
= XVECEXP (pat
, 0, i
);
12604 if (GET_CODE (sub
) == USE
)
12606 sub
= XEXP (sub
, 0);
12607 if (GET_CODE (sub
) == UNSPEC
12608 && XINT (sub
, 1) == UNSPEC_TOC
)
12618 create_TOC_reference (rtx symbol
)
12620 return gen_rtx_PLUS (Pmode
,
12621 gen_rtx_REG (Pmode
, TOC_REGISTER
),
12622 gen_rtx_CONST (Pmode
,
12623 gen_rtx_MINUS (Pmode
, symbol
,
12624 gen_rtx_SYMBOL_REF (Pmode
, toc_label_name
))));
12627 /* If _Unwind_* has been called from within the same module,
12628 toc register is not guaranteed to be saved to 40(1) on function
12629 entry. Save it there in that case. */
12632 rs6000_aix_emit_builtin_unwind_init (void)
12635 rtx stack_top
= gen_reg_rtx (Pmode
);
12636 rtx opcode_addr
= gen_reg_rtx (Pmode
);
12637 rtx opcode
= gen_reg_rtx (SImode
);
12638 rtx tocompare
= gen_reg_rtx (SImode
);
12639 rtx no_toc_save_needed
= gen_label_rtx ();
12641 mem
= gen_rtx_MEM (Pmode
, hard_frame_pointer_rtx
);
12642 emit_move_insn (stack_top
, mem
);
12644 mem
= gen_rtx_MEM (Pmode
,
12645 gen_rtx_PLUS (Pmode
, stack_top
,
12646 GEN_INT (2 * GET_MODE_SIZE (Pmode
))));
12647 emit_move_insn (opcode_addr
, mem
);
12648 emit_move_insn (opcode
, gen_rtx_MEM (SImode
, opcode_addr
));
12649 emit_move_insn (tocompare
, gen_int_mode (TARGET_32BIT
? 0x80410014
12650 : 0xE8410028, SImode
));
12652 do_compare_rtx_and_jump (opcode
, tocompare
, EQ
, 1,
12653 SImode
, NULL_RTX
, NULL_RTX
,
12654 no_toc_save_needed
);
12656 mem
= gen_rtx_MEM (Pmode
,
12657 gen_rtx_PLUS (Pmode
, stack_top
,
12658 GEN_INT (5 * GET_MODE_SIZE (Pmode
))));
12659 emit_move_insn (mem
, gen_rtx_REG (Pmode
, 2));
12660 emit_label (no_toc_save_needed
);
12663 /* This ties together stack memory (MEM with an alias set of
12664 rs6000_sr_alias_set) and the change to the stack pointer. */
12667 rs6000_emit_stack_tie (void)
12669 rtx mem
= gen_rtx_MEM (BLKmode
, gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
));
12671 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
12672 emit_insn (gen_stack_tie (mem
));
12675 /* Emit the correct code for allocating stack space, as insns.
12676 If COPY_R12, make sure a copy of the old frame is left in r12.
12677 The generated code may use hard register 0 as a temporary. */
12680 rs6000_emit_allocate_stack (HOST_WIDE_INT size
, int copy_r12
)
12683 rtx stack_reg
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
12684 rtx tmp_reg
= gen_rtx_REG (Pmode
, 0);
12685 rtx todec
= GEN_INT (-size
);
12687 if (current_function_limit_stack
)
12689 if (REG_P (stack_limit_rtx
)
12690 && REGNO (stack_limit_rtx
) > 1
12691 && REGNO (stack_limit_rtx
) <= 31)
12693 emit_insn (TARGET_32BIT
12694 ? gen_addsi3 (tmp_reg
,
12697 : gen_adddi3 (tmp_reg
,
12701 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
12704 else if (GET_CODE (stack_limit_rtx
) == SYMBOL_REF
12706 && DEFAULT_ABI
== ABI_V4
)
12708 rtx toload
= gen_rtx_CONST (VOIDmode
,
12709 gen_rtx_PLUS (Pmode
,
12713 emit_insn (gen_elf_high (tmp_reg
, toload
));
12714 emit_insn (gen_elf_low (tmp_reg
, tmp_reg
, toload
));
12715 emit_insn (gen_cond_trap (LTU
, stack_reg
, tmp_reg
,
12719 warning ("stack limit expression is not supported");
12722 if (copy_r12
|| ! TARGET_UPDATE
)
12723 emit_move_insn (gen_rtx_REG (Pmode
, 12), stack_reg
);
12729 /* Need a note here so that try_split doesn't get confused. */
12730 if (get_last_insn() == NULL_RTX
)
12731 emit_note (NOTE_INSN_DELETED
);
12732 insn
= emit_move_insn (tmp_reg
, todec
);
12733 try_split (PATTERN (insn
), insn
, 0);
12737 insn
= emit_insn (TARGET_32BIT
12738 ? gen_movsi_update (stack_reg
, stack_reg
,
12740 : gen_movdi_di_update (stack_reg
, stack_reg
,
12741 todec
, stack_reg
));
12745 insn
= emit_insn (TARGET_32BIT
12746 ? gen_addsi3 (stack_reg
, stack_reg
, todec
)
12747 : gen_adddi3 (stack_reg
, stack_reg
, todec
));
12748 emit_move_insn (gen_rtx_MEM (Pmode
, stack_reg
),
12749 gen_rtx_REG (Pmode
, 12));
12752 RTX_FRAME_RELATED_P (insn
) = 1;
12754 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
12755 gen_rtx_SET (VOIDmode
, stack_reg
,
12756 gen_rtx_PLUS (Pmode
, stack_reg
,
12761 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
12762 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
12763 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
12764 deduce these equivalences by itself so it wasn't necessary to hold
12765 its hand so much. */
12768 rs6000_frame_related (rtx insn
, rtx reg
, HOST_WIDE_INT val
,
12769 rtx reg2
, rtx rreg
)
12773 /* copy_rtx will not make unique copies of registers, so we need to
12774 ensure we don't have unwanted sharing here. */
12776 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
12779 reg
= gen_raw_REG (GET_MODE (reg
), REGNO (reg
));
12781 real
= copy_rtx (PATTERN (insn
));
12783 if (reg2
!= NULL_RTX
)
12784 real
= replace_rtx (real
, reg2
, rreg
);
12786 real
= replace_rtx (real
, reg
,
12787 gen_rtx_PLUS (Pmode
, gen_rtx_REG (Pmode
,
12788 STACK_POINTER_REGNUM
),
12791 /* We expect that 'real' is either a SET or a PARALLEL containing
12792 SETs (and possibly other stuff). In a PARALLEL, all the SETs
12793 are important so they all have to be marked RTX_FRAME_RELATED_P. */
12795 if (GET_CODE (real
) == SET
)
12799 temp
= simplify_rtx (SET_SRC (set
));
12801 SET_SRC (set
) = temp
;
12802 temp
= simplify_rtx (SET_DEST (set
));
12804 SET_DEST (set
) = temp
;
12805 if (GET_CODE (SET_DEST (set
)) == MEM
)
12807 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
12809 XEXP (SET_DEST (set
), 0) = temp
;
12812 else if (GET_CODE (real
) == PARALLEL
)
12815 for (i
= 0; i
< XVECLEN (real
, 0); i
++)
12816 if (GET_CODE (XVECEXP (real
, 0, i
)) == SET
)
12818 rtx set
= XVECEXP (real
, 0, i
);
12820 temp
= simplify_rtx (SET_SRC (set
));
12822 SET_SRC (set
) = temp
;
12823 temp
= simplify_rtx (SET_DEST (set
));
12825 SET_DEST (set
) = temp
;
12826 if (GET_CODE (SET_DEST (set
)) == MEM
)
12828 temp
= simplify_rtx (XEXP (SET_DEST (set
), 0));
12830 XEXP (SET_DEST (set
), 0) = temp
;
12832 RTX_FRAME_RELATED_P (set
) = 1;
12839 real
= spe_synthesize_frame_save (real
);
12841 RTX_FRAME_RELATED_P (insn
) = 1;
12842 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
12847 /* Given an SPE frame note, return a PARALLEL of SETs with the
12848 original note, plus a synthetic register save. */
12851 spe_synthesize_frame_save (rtx real
)
12853 rtx synth
, offset
, reg
, real2
;
12855 if (GET_CODE (real
) != SET
12856 || GET_MODE (SET_SRC (real
)) != V2SImode
)
12859 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
12860 frame related note. The parallel contains a set of the register
12861 being saved, and another set to a synthetic register (n+1200).
12862 This is so we can differentiate between 64-bit and 32-bit saves.
12863 Words cannot describe this nastiness. */
12865 if (GET_CODE (SET_DEST (real
)) != MEM
12866 || GET_CODE (XEXP (SET_DEST (real
), 0)) != PLUS
12867 || GET_CODE (SET_SRC (real
)) != REG
)
12871 (set (mem (plus (reg x) (const y)))
12874 (set (mem (plus (reg x) (const y+4)))
12878 real2
= copy_rtx (real
);
12879 PUT_MODE (SET_DEST (real2
), SImode
);
12880 reg
= SET_SRC (real2
);
12881 real2
= replace_rtx (real2
, reg
, gen_rtx_REG (SImode
, REGNO (reg
)));
12882 synth
= copy_rtx (real2
);
12884 if (BYTES_BIG_ENDIAN
)
12886 offset
= XEXP (XEXP (SET_DEST (real2
), 0), 1);
12887 real2
= replace_rtx (real2
, offset
, GEN_INT (INTVAL (offset
) + 4));
12890 reg
= SET_SRC (synth
);
12892 synth
= replace_rtx (synth
, reg
,
12893 gen_rtx_REG (SImode
, REGNO (reg
) + 1200));
12895 offset
= XEXP (XEXP (SET_DEST (synth
), 0), 1);
12896 synth
= replace_rtx (synth
, offset
,
12897 GEN_INT (INTVAL (offset
)
12898 + (BYTES_BIG_ENDIAN
? 0 : 4)));
12900 RTX_FRAME_RELATED_P (synth
) = 1;
12901 RTX_FRAME_RELATED_P (real2
) = 1;
12902 if (BYTES_BIG_ENDIAN
)
12903 real
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, synth
, real2
));
12905 real
= gen_rtx_PARALLEL (VOIDmode
, gen_rtvec (2, real2
, synth
));
12910 /* Returns an insn that has a vrsave set operation with the
12911 appropriate CLOBBERs. */
12914 generate_set_vrsave (rtx reg
, rs6000_stack_t
*info
, int epiloguep
)
12917 rtx insn
, clobs
[TOTAL_ALTIVEC_REGS
+ 1];
12918 rtx vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
12921 = gen_rtx_SET (VOIDmode
,
12923 gen_rtx_UNSPEC_VOLATILE (SImode
,
12924 gen_rtvec (2, reg
, vrsave
),
12929 /* We need to clobber the registers in the mask so the scheduler
12930 does not move sets to VRSAVE before sets of AltiVec registers.
12932 However, if the function receives nonlocal gotos, reload will set
12933 all call saved registers live. We will end up with:
12935 (set (reg 999) (mem))
12936 (parallel [ (set (reg vrsave) (unspec blah))
12937 (clobber (reg 999))])
12939 The clobber will cause the store into reg 999 to be dead, and
12940 flow will attempt to delete an epilogue insn. In this case, we
12941 need an unspec use/set of the register. */
12943 for (i
= FIRST_ALTIVEC_REGNO
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
12944 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
12946 if (!epiloguep
|| call_used_regs
[i
])
12947 clobs
[nclobs
++] = gen_rtx_CLOBBER (VOIDmode
,
12948 gen_rtx_REG (V4SImode
, i
));
12951 rtx reg
= gen_rtx_REG (V4SImode
, i
);
12954 = gen_rtx_SET (VOIDmode
,
12956 gen_rtx_UNSPEC (V4SImode
,
12957 gen_rtvec (1, reg
), 27));
12961 insn
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (nclobs
));
12963 for (i
= 0; i
< nclobs
; ++i
)
12964 XVECEXP (insn
, 0, i
) = clobs
[i
];
12969 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
12970 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
12973 emit_frame_save (rtx frame_reg
, rtx frame_ptr
, enum machine_mode mode
,
12974 unsigned int regno
, int offset
, HOST_WIDE_INT total_size
)
12976 rtx reg
, offset_rtx
, insn
, mem
, addr
, int_rtx
;
12977 rtx replacea
, replaceb
;
12979 int_rtx
= GEN_INT (offset
);
12981 /* Some cases that need register indexed addressing. */
12982 if ((TARGET_ALTIVEC_ABI
&& ALTIVEC_VECTOR_MODE (mode
))
12984 && SPE_VECTOR_MODE (mode
)
12985 && !SPE_CONST_OFFSET_OK (offset
)))
12987 /* Whomever calls us must make sure r11 is available in the
12988 flow path of instructions in the prologue. */
12989 offset_rtx
= gen_rtx_REG (Pmode
, 11);
12990 emit_move_insn (offset_rtx
, int_rtx
);
12992 replacea
= offset_rtx
;
12993 replaceb
= int_rtx
;
12997 offset_rtx
= int_rtx
;
12998 replacea
= NULL_RTX
;
12999 replaceb
= NULL_RTX
;
13002 reg
= gen_rtx_REG (mode
, regno
);
13003 addr
= gen_rtx_PLUS (Pmode
, frame_reg
, offset_rtx
);
13004 mem
= gen_rtx_MEM (mode
, addr
);
13005 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13007 insn
= emit_move_insn (mem
, reg
);
13009 rs6000_frame_related (insn
, frame_ptr
, total_size
, replacea
, replaceb
);
13012 /* Emit an offset memory reference suitable for a frame store, while
13013 converting to a valid addressing mode. */
13016 gen_frame_mem_offset (enum machine_mode mode
, rtx reg
, int offset
)
13018 rtx int_rtx
, offset_rtx
;
13020 int_rtx
= GEN_INT (offset
);
13022 if (TARGET_SPE_ABI
&& SPE_VECTOR_MODE (mode
))
13024 offset_rtx
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
13025 emit_move_insn (offset_rtx
, int_rtx
);
13028 offset_rtx
= int_rtx
;
13030 return gen_rtx_MEM (mode
, gen_rtx_PLUS (Pmode
, reg
, offset_rtx
));
13033 #ifndef TARGET_FIX_AND_CONTINUE
13034 #define TARGET_FIX_AND_CONTINUE 0
13037 /* Emit function prologue as insns. */
13040 rs6000_emit_prologue (void)
13042 rs6000_stack_t
*info
= rs6000_stack_info ();
13043 enum machine_mode reg_mode
= Pmode
;
13044 int reg_size
= TARGET_32BIT
? 4 : 8;
13045 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, STACK_POINTER_REGNUM
);
13046 rtx frame_ptr_rtx
= gen_rtx_REG (Pmode
, 12);
13047 rtx frame_reg_rtx
= sp_reg_rtx
;
13048 rtx cr_save_rtx
= NULL_RTX
;
13050 int saving_FPRs_inline
;
13051 int using_store_multiple
;
13052 HOST_WIDE_INT sp_offset
= 0;
13054 if (TARGET_FIX_AND_CONTINUE
)
13056 /* gdb on darwin arranges to forward a function from the old
13057 address by modifying the first 4 instructions of the function
13058 to branch to the overriding function. This is necessary to
13059 permit function pointers that point to the old function to
13060 actually forward to the new function. */
13061 emit_insn (gen_nop ());
13062 emit_insn (gen_nop ());
13063 emit_insn (gen_nop ());
13064 emit_insn (gen_nop ());
13067 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
13069 reg_mode
= V2SImode
;
13073 using_store_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
13074 && (!TARGET_SPE_ABI
13075 || info
->spe_64bit_regs_used
== 0)
13076 && info
->first_gp_reg_save
< 31);
13077 saving_FPRs_inline
= (info
->first_fp_reg_save
== 64
13078 || FP_SAVE_INLINE (info
->first_fp_reg_save
)
13079 || current_function_calls_eh_return
13080 || cfun
->machine
->ra_need_lr
);
13082 /* For V.4, update stack before we do any saving and set back pointer. */
13084 && (DEFAULT_ABI
== ABI_V4
13085 || current_function_calls_eh_return
))
13087 if (info
->total_size
< 32767)
13088 sp_offset
= info
->total_size
;
13090 frame_reg_rtx
= frame_ptr_rtx
;
13091 rs6000_emit_allocate_stack (info
->total_size
,
13092 (frame_reg_rtx
!= sp_reg_rtx
13093 && (info
->cr_save_p
13095 || info
->first_fp_reg_save
< 64
13096 || info
->first_gp_reg_save
< 32
13098 if (frame_reg_rtx
!= sp_reg_rtx
)
13099 rs6000_emit_stack_tie ();
13102 /* Handle world saves specially here. */
13103 if (info
->world_save_p
)
13109 /* save_world expects lr in r0. */
13110 if (info
->lr_save_p
)
13112 insn
= emit_move_insn (gen_rtx_REG (Pmode
, 0),
13113 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
13114 RTX_FRAME_RELATED_P (insn
) = 1;
13117 /* The SAVE_WORLD and RESTORE_WORLD routines make a number of
13118 assumptions about the offsets of various bits of the stack
13119 frame. Abort if things aren't what they should be. */
13120 if (info
->gp_save_offset
!= -220
13121 || info
->fp_save_offset
!= -144
13122 || info
->lr_save_offset
!= 8
13123 || info
->cr_save_offset
!= 4
13125 || !info
->lr_save_p
13126 || (current_function_calls_eh_return
&& info
->ehrd_offset
!= -432)
13127 || (info
->vrsave_save_offset
!= -224
13128 || info
->altivec_save_offset
!= (-224 -16 -192)))
13131 treg
= gen_rtx_REG (SImode
, 11);
13132 emit_move_insn (treg
, GEN_INT (-info
->total_size
));
13134 /* SAVE_WORLD takes the caller's LR in R0 and the frame size
13135 in R11. It also clobbers R12, so beware! */
13137 /* Preserve CR2 for save_world prologues */
13139 sz
+= 32 - info
->first_gp_reg_save
;
13140 sz
+= 64 - info
->first_fp_reg_save
;
13141 sz
+= LAST_ALTIVEC_REGNO
- info
->first_altivec_reg_save
+ 1;
13142 p
= rtvec_alloc (sz
);
13144 RTVEC_ELT (p
, j
++) = gen_rtx_CLOBBER (VOIDmode
,
13145 gen_rtx_REG (Pmode
,
13146 LINK_REGISTER_REGNUM
));
13147 RTVEC_ELT (p
, j
++) = gen_rtx_USE (VOIDmode
,
13148 gen_rtx_SYMBOL_REF (Pmode
,
13150 /* We do floats first so that the instruction pattern matches
13152 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
13154 rtx reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
13155 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13156 GEN_INT (info
->fp_save_offset
13157 + sp_offset
+ 8 * i
));
13158 rtx mem
= gen_rtx_MEM (DFmode
, addr
);
13159 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13161 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, mem
, reg
);
13163 for (i
= 0; info
->first_altivec_reg_save
+ i
<= LAST_ALTIVEC_REGNO
; i
++)
13165 rtx reg
= gen_rtx_REG (V4SImode
, info
->first_altivec_reg_save
+ i
);
13166 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13167 GEN_INT (info
->altivec_save_offset
13168 + sp_offset
+ 16 * i
));
13169 rtx mem
= gen_rtx_MEM (V4SImode
, addr
);
13170 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13172 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, mem
, reg
);
13174 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
13176 rtx reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
13177 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13178 GEN_INT (info
->gp_save_offset
13179 + sp_offset
+ reg_size
* i
));
13180 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
13181 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13183 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, mem
, reg
);
13187 /* CR register traditionally saved as CR2. */
13188 rtx reg
= gen_rtx_REG (reg_mode
, CR2_REGNO
);
13189 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13190 GEN_INT (info
->cr_save_offset
13192 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
13193 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13195 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, mem
, reg
);
13197 /* Prevent any attempt to delete the setting of r0 and treg! */
13198 RTVEC_ELT (p
, j
++) = gen_rtx_USE (VOIDmode
, gen_rtx_REG (Pmode
, 0));
13199 RTVEC_ELT (p
, j
++) = gen_rtx_USE (VOIDmode
, treg
);
13200 RTVEC_ELT (p
, j
++) = gen_rtx_CLOBBER (VOIDmode
, sp_reg_rtx
);
13202 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
13203 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
13204 NULL_RTX
, NULL_RTX
);
13206 if (current_function_calls_eh_return
)
13211 unsigned int regno
= EH_RETURN_DATA_REGNO (i
);
13212 if (regno
== INVALID_REGNUM
)
13214 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, reg_mode
, regno
,
13215 info
->ehrd_offset
+ sp_offset
13216 + reg_size
* (int) i
,
13222 /* Save AltiVec registers if needed. */
13223 if (! info
->world_save_p
&& TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
13227 /* There should be a non inline version of this, for when we
13228 are saving lots of vector registers. */
13229 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
13230 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
13232 rtx areg
, savereg
, mem
;
13235 offset
= info
->altivec_save_offset
+ sp_offset
13236 + 16 * (i
- info
->first_altivec_reg_save
);
13238 savereg
= gen_rtx_REG (V4SImode
, i
);
13240 areg
= gen_rtx_REG (Pmode
, 0);
13241 emit_move_insn (areg
, GEN_INT (offset
));
13243 /* AltiVec addressing mode is [reg+reg]. */
13244 mem
= gen_rtx_MEM (V4SImode
,
13245 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
));
13247 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13249 insn
= emit_move_insn (mem
, savereg
);
13251 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
13252 areg
, GEN_INT (offset
));
13256 /* VRSAVE is a bit vector representing which AltiVec registers
13257 are used. The OS uses this to determine which vector
13258 registers to save on a context switch. We need to save
13259 VRSAVE on the stack frame, add whatever AltiVec registers we
13260 used in this function, and do the corresponding magic in the
13263 if (TARGET_ALTIVEC
&& TARGET_ALTIVEC_VRSAVE
13264 && ! info
->world_save_p
&& info
->vrsave_mask
!= 0)
13266 rtx reg
, mem
, vrsave
;
13269 /* Get VRSAVE onto a GPR. */
13270 reg
= gen_rtx_REG (SImode
, 12);
13271 vrsave
= gen_rtx_REG (SImode
, VRSAVE_REGNO
);
13273 emit_insn (gen_get_vrsave_internal (reg
));
13275 emit_insn (gen_rtx_SET (VOIDmode
, reg
, vrsave
));
13278 offset
= info
->vrsave_save_offset
+ sp_offset
;
13280 = gen_rtx_MEM (SImode
,
13281 gen_rtx_PLUS (Pmode
, frame_reg_rtx
, GEN_INT (offset
)));
13282 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13283 insn
= emit_move_insn (mem
, reg
);
13285 /* Include the registers in the mask. */
13286 emit_insn (gen_iorsi3 (reg
, reg
, GEN_INT ((int) info
->vrsave_mask
)));
13288 insn
= emit_insn (generate_set_vrsave (reg
, info
, 0));
13291 /* If we use the link register, get it into r0. */
13292 if (! info
->world_save_p
&& info
->lr_save_p
)
13294 insn
= emit_move_insn (gen_rtx_REG (Pmode
, 0),
13295 gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
));
13296 RTX_FRAME_RELATED_P (insn
) = 1;
13299 /* If we need to save CR, put it into r12. */
13300 if (! info
->world_save_p
&& info
->cr_save_p
&& frame_reg_rtx
!= frame_ptr_rtx
)
13304 cr_save_rtx
= gen_rtx_REG (SImode
, 12);
13305 insn
= emit_insn (gen_movesi_from_cr (cr_save_rtx
));
13306 RTX_FRAME_RELATED_P (insn
) = 1;
13307 /* Now, there's no way that dwarf2out_frame_debug_expr is going
13308 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
13309 But that's OK. All we have to do is specify that _one_ condition
13310 code register is saved in this stack slot. The thrower's epilogue
13311 will then restore all the call-saved registers.
13312 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
13313 set
= gen_rtx_SET (VOIDmode
, cr_save_rtx
,
13314 gen_rtx_REG (SImode
, CR2_REGNO
));
13315 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
13320 /* Do any required saving of fpr's. If only one or two to save, do
13321 it ourselves. Otherwise, call function. */
13322 if (! info
->world_save_p
&& saving_FPRs_inline
)
13325 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
13326 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
13327 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
13328 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, DFmode
,
13329 info
->first_fp_reg_save
+ i
,
13330 info
->fp_save_offset
+ sp_offset
+ 8 * i
,
13333 else if (! info
->world_save_p
&& info
->first_fp_reg_save
!= 64)
13337 const char *alloc_rname
;
13339 p
= rtvec_alloc (2 + 64 - info
->first_fp_reg_save
);
13341 RTVEC_ELT (p
, 0) = gen_rtx_CLOBBER (VOIDmode
,
13342 gen_rtx_REG (Pmode
,
13343 LINK_REGISTER_REGNUM
));
13344 sprintf (rname
, "%s%d%s", SAVE_FP_PREFIX
,
13345 info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
);
13346 alloc_rname
= ggc_strdup (rname
);
13347 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
13348 gen_rtx_SYMBOL_REF (Pmode
,
13350 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
13352 rtx addr
, reg
, mem
;
13353 reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
13354 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13355 GEN_INT (info
->fp_save_offset
13356 + sp_offset
+ 8*i
));
13357 mem
= gen_rtx_MEM (DFmode
, addr
);
13358 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13360 RTVEC_ELT (p
, i
+ 2) = gen_rtx_SET (VOIDmode
, mem
, reg
);
13362 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
13363 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
13364 NULL_RTX
, NULL_RTX
);
13367 /* Save GPRs. This is done as a PARALLEL if we are using
13368 the store-multiple instructions. */
13369 if (! info
->world_save_p
&& using_store_multiple
)
13373 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
13374 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
13376 rtx addr
, reg
, mem
;
13377 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
13378 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13379 GEN_INT (info
->gp_save_offset
13382 mem
= gen_rtx_MEM (reg_mode
, addr
);
13383 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13385 RTVEC_ELT (p
, i
) = gen_rtx_SET (VOIDmode
, mem
, reg
);
13387 insn
= emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
13388 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
13389 NULL_RTX
, NULL_RTX
);
13391 else if (! info
->world_save_p
)
13394 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
13395 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
13396 && (! call_used_regs
[info
->first_gp_reg_save
+i
]
13397 || (i
+info
->first_gp_reg_save
13398 == RS6000_PIC_OFFSET_TABLE_REGNUM
13399 && TARGET_TOC
&& TARGET_MINIMAL_TOC
)))
13400 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
13401 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
13402 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
13404 rtx addr
, reg
, mem
;
13405 reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
13407 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
13409 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
13412 if (!SPE_CONST_OFFSET_OK (offset
))
13414 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
13415 emit_move_insn (b
, GEN_INT (offset
));
13418 b
= GEN_INT (offset
);
13420 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
13421 mem
= gen_rtx_MEM (V2SImode
, addr
);
13422 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13423 insn
= emit_move_insn (mem
, reg
);
13425 if (GET_CODE (b
) == CONST_INT
)
13426 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
13427 NULL_RTX
, NULL_RTX
);
13429 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
13430 b
, GEN_INT (offset
));
13434 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13435 GEN_INT (info
->gp_save_offset
13438 mem
= gen_rtx_MEM (reg_mode
, addr
);
13439 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13441 insn
= emit_move_insn (mem
, reg
);
13442 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
13443 NULL_RTX
, NULL_RTX
);
13448 /* ??? There's no need to emit actual instructions here, but it's the
13449 easiest way to get the frame unwind information emitted. */
13450 if (! info
->world_save_p
&& current_function_calls_eh_return
)
13452 unsigned int i
, regno
;
13454 /* In AIX ABI we need to pretend we save r2 here. */
13457 rtx addr
, reg
, mem
;
13459 reg
= gen_rtx_REG (reg_mode
, 2);
13460 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13461 GEN_INT (sp_offset
+ 5 * reg_size
));
13462 mem
= gen_rtx_MEM (reg_mode
, addr
);
13463 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13465 insn
= emit_move_insn (mem
, reg
);
13466 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
13467 NULL_RTX
, NULL_RTX
);
13468 PATTERN (insn
) = gen_blockage ();
13473 regno
= EH_RETURN_DATA_REGNO (i
);
13474 if (regno
== INVALID_REGNUM
)
13477 emit_frame_save (frame_reg_rtx
, frame_ptr_rtx
, reg_mode
, regno
,
13478 info
->ehrd_offset
+ sp_offset
13479 + reg_size
* (int) i
,
13484 /* Save lr if we used it. */
13485 if (! info
->world_save_p
&& info
->lr_save_p
)
13487 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13488 GEN_INT (info
->lr_save_offset
+ sp_offset
));
13489 rtx reg
= gen_rtx_REG (Pmode
, 0);
13490 rtx mem
= gen_rtx_MEM (Pmode
, addr
);
13491 /* This should not be of rs6000_sr_alias_set, because of
13492 __builtin_return_address. */
13494 insn
= emit_move_insn (mem
, reg
);
13495 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
13496 NULL_RTX
, NULL_RTX
);
13499 /* Save CR if we use any that must be preserved. */
13500 if (! info
->world_save_p
&& info
->cr_save_p
)
13502 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13503 GEN_INT (info
->cr_save_offset
+ sp_offset
));
13504 rtx mem
= gen_rtx_MEM (SImode
, addr
);
13505 /* See the large comment above about why CR2_REGNO is used. */
13506 rtx magic_eh_cr_reg
= gen_rtx_REG (SImode
, CR2_REGNO
);
13508 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13510 /* If r12 was used to hold the original sp, copy cr into r0 now
13512 if (REGNO (frame_reg_rtx
) == 12)
13516 cr_save_rtx
= gen_rtx_REG (SImode
, 0);
13517 insn
= emit_insn (gen_movesi_from_cr (cr_save_rtx
));
13518 RTX_FRAME_RELATED_P (insn
) = 1;
13519 set
= gen_rtx_SET (VOIDmode
, cr_save_rtx
, magic_eh_cr_reg
);
13520 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR
,
13525 insn
= emit_move_insn (mem
, cr_save_rtx
);
13527 rs6000_frame_related (insn
, frame_ptr_rtx
, info
->total_size
,
13528 NULL_RTX
, NULL_RTX
);
13531 /* Update stack and set back pointer unless this is V.4,
13532 for which it was done previously. */
13533 if (! info
->world_save_p
&& info
->push_p
13534 && !(DEFAULT_ABI
== ABI_V4
|| current_function_calls_eh_return
))
13535 rs6000_emit_allocate_stack (info
->total_size
, FALSE
);
13537 /* Set frame pointer, if needed. */
13538 if (frame_pointer_needed
)
13540 insn
= emit_move_insn (gen_rtx_REG (Pmode
, FRAME_POINTER_REGNUM
),
13542 RTX_FRAME_RELATED_P (insn
) = 1;
13545 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
13546 if ((TARGET_TOC
&& TARGET_MINIMAL_TOC
&& get_pool_size () != 0)
13547 || (DEFAULT_ABI
== ABI_V4
&& flag_pic
== 1
13548 && regs_ever_live
[RS6000_PIC_OFFSET_TABLE_REGNUM
]))
13550 /* If emit_load_toc_table will use the link register, we need to save
13551 it. We use R12 for this purpose because emit_load_toc_table
13552 can use register 0. This allows us to use a plain 'blr' to return
13553 from the procedure more often. */
13554 int save_LR_around_toc_setup
= (TARGET_ELF
13555 && DEFAULT_ABI
!= ABI_AIX
13557 && ! info
->lr_save_p
13558 && EDGE_COUNT (EXIT_BLOCK_PTR
->preds
) > 0);
13559 if (save_LR_around_toc_setup
)
13561 rtx lr
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
13563 insn
= emit_move_insn (frame_ptr_rtx
, lr
);
13564 rs6000_maybe_dead (insn
);
13565 RTX_FRAME_RELATED_P (insn
) = 1;
13567 rs6000_emit_load_toc_table (TRUE
);
13569 insn
= emit_move_insn (lr
, frame_ptr_rtx
);
13570 rs6000_maybe_dead (insn
);
13571 RTX_FRAME_RELATED_P (insn
) = 1;
13574 rs6000_emit_load_toc_table (TRUE
);
13578 if (DEFAULT_ABI
== ABI_DARWIN
13579 && flag_pic
&& current_function_uses_pic_offset_table
)
13581 rtx lr
= gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
);
13582 rtx src
= machopic_function_base_sym ();
13584 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (lr
, src
)));
13586 insn
= emit_move_insn (gen_rtx_REG (Pmode
,
13587 RS6000_PIC_OFFSET_TABLE_REGNUM
),
13589 rs6000_maybe_dead (insn
);
13594 /* Write function prologue. */
13597 rs6000_output_function_prologue (FILE *file
,
13598 HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
13600 rs6000_stack_t
*info
= rs6000_stack_info ();
13602 if (TARGET_DEBUG_STACK
)
13603 debug_stack_info (info
);
13605 /* Write .extern for any function we will call to save and restore
13607 if (info
->first_fp_reg_save
< 64
13608 && !FP_SAVE_INLINE (info
->first_fp_reg_save
))
13609 fprintf (file
, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
13610 SAVE_FP_PREFIX
, info
->first_fp_reg_save
- 32, SAVE_FP_SUFFIX
,
13611 RESTORE_FP_PREFIX
, info
->first_fp_reg_save
- 32,
13612 RESTORE_FP_SUFFIX
);
13614 /* Write .extern for AIX common mode routines, if needed. */
13615 if (! TARGET_POWER
&& ! TARGET_POWERPC
&& ! common_mode_defined
)
13617 fputs ("\t.extern __mulh\n", file
);
13618 fputs ("\t.extern __mull\n", file
);
13619 fputs ("\t.extern __divss\n", file
);
13620 fputs ("\t.extern __divus\n", file
);
13621 fputs ("\t.extern __quoss\n", file
);
13622 fputs ("\t.extern __quous\n", file
);
13623 common_mode_defined
= 1;
13626 if (! HAVE_prologue
)
13630 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
13631 the "toplevel" insn chain. */
13632 emit_note (NOTE_INSN_DELETED
);
13633 rs6000_emit_prologue ();
13634 emit_note (NOTE_INSN_DELETED
);
13636 /* Expand INSN_ADDRESSES so final() doesn't crash. */
13640 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
13642 INSN_ADDRESSES_NEW (insn
, addr
);
13647 if (TARGET_DEBUG_STACK
)
13648 debug_rtx_list (get_insns (), 100);
13649 final (get_insns (), file
, FALSE
, FALSE
);
13653 rs6000_pic_labelno
++;
13656 /* Emit function epilogue as insns.
13658 At present, dwarf2out_frame_debug_expr doesn't understand
13659 register restores, so we don't bother setting RTX_FRAME_RELATED_P
13660 anywhere in the epilogue. Most of the insns below would in any case
13661 need special notes to explain where r11 is in relation to the stack. */
13664 rs6000_emit_epilogue (int sibcall
)
13666 rs6000_stack_t
*info
;
13667 int restoring_FPRs_inline
;
13668 int using_load_multiple
;
13669 int using_mfcr_multiple
;
13670 int use_backchain_to_restore_sp
;
13672 rtx sp_reg_rtx
= gen_rtx_REG (Pmode
, 1);
13673 rtx frame_reg_rtx
= sp_reg_rtx
;
13674 enum machine_mode reg_mode
= Pmode
;
13675 int reg_size
= TARGET_32BIT
? 4 : 8;
13678 info
= rs6000_stack_info ();
13680 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
13682 reg_mode
= V2SImode
;
13686 using_load_multiple
= (TARGET_MULTIPLE
&& ! TARGET_POWERPC64
13687 && (!TARGET_SPE_ABI
13688 || info
->spe_64bit_regs_used
== 0)
13689 && info
->first_gp_reg_save
< 31);
13690 restoring_FPRs_inline
= (sibcall
13691 || current_function_calls_eh_return
13692 || info
->first_fp_reg_save
== 64
13693 || FP_SAVE_INLINE (info
->first_fp_reg_save
));
13694 use_backchain_to_restore_sp
= (frame_pointer_needed
13695 || current_function_calls_alloca
13696 || info
->total_size
> 32767);
13697 using_mfcr_multiple
= (rs6000_cpu
== PROCESSOR_PPC601
13698 || rs6000_cpu
== PROCESSOR_PPC603
13699 || rs6000_cpu
== PROCESSOR_PPC750
13702 if (info
->world_save_p
)
13706 const char *alloc_rname
;
13709 /* eh_rest_world_r10 will return to the location saved in the LR
13710 stack slot (which is not likely to be our caller.)
13711 Input: R10 -- stack adjustment. Clobbers R0, R11, R12, R7, R8.
13712 rest_world is similar, except any R10 parameter is ignored.
13713 The exception-handling stuff that was here in 2.95 is no
13714 longer necessary. */
13718 + 32 - info
->first_gp_reg_save
13719 + LAST_ALTIVEC_REGNO
+ 1 - info
->first_altivec_reg_save
13720 + 63 + 1 - info
->first_fp_reg_save
);
13722 strcpy (rname
, (current_function_calls_eh_return
) ?
13723 "*eh_rest_world_r10" : "*rest_world");
13724 alloc_rname
= ggc_strdup (rname
);
13727 RTVEC_ELT (p
, j
++) = gen_rtx_RETURN (VOIDmode
);
13728 RTVEC_ELT (p
, j
++) = gen_rtx_USE (VOIDmode
,
13729 gen_rtx_REG (Pmode
,
13730 LINK_REGISTER_REGNUM
));
13732 = gen_rtx_USE (VOIDmode
, gen_rtx_SYMBOL_REF (Pmode
, alloc_rname
));
13733 /* The instruction pattern requires a clobber here;
13734 it is shared with the restVEC helper. */
13736 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 11));
13739 /* CR register traditionally saved as CR2. */
13740 rtx reg
= gen_rtx_REG (reg_mode
, CR2_REGNO
);
13741 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13742 GEN_INT (info
->cr_save_offset
));
13743 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
13744 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13746 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, reg
, mem
);
13749 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
13751 rtx reg
= gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
);
13752 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13753 GEN_INT (info
->gp_save_offset
13755 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
13756 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13758 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, reg
, mem
);
13760 for (i
= 0; info
->first_altivec_reg_save
+ i
<= LAST_ALTIVEC_REGNO
; i
++)
13762 rtx reg
= gen_rtx_REG (V4SImode
, info
->first_altivec_reg_save
+ i
);
13763 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13764 GEN_INT (info
->altivec_save_offset
13766 rtx mem
= gen_rtx_MEM (V4SImode
, addr
);
13767 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13769 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, reg
, mem
);
13771 for (i
= 0; info
->first_fp_reg_save
+ i
<= 63; i
++)
13773 rtx reg
= gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
);
13774 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13775 GEN_INT (info
->fp_save_offset
13777 rtx mem
= gen_rtx_MEM (DFmode
, addr
);
13778 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13780 RTVEC_ELT (p
, j
++) = gen_rtx_SET (VOIDmode
, reg
, mem
);
13783 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 0));
13785 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (SImode
, 12));
13787 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (SImode
, 7));
13789 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (SImode
, 8));
13791 = gen_rtx_USE (VOIDmode
, gen_rtx_REG (SImode
, 10));
13792 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
13797 /* If we have a frame pointer, a call to alloca, or a large stack
13798 frame, restore the old stack pointer using the backchain. Otherwise,
13799 we know what size to update it with. */
13800 if (use_backchain_to_restore_sp
)
13802 /* Under V.4, don't reset the stack pointer until after we're done
13803 loading the saved registers. */
13804 if (DEFAULT_ABI
== ABI_V4
)
13805 frame_reg_rtx
= gen_rtx_REG (Pmode
, 11);
13807 emit_move_insn (frame_reg_rtx
,
13808 gen_rtx_MEM (Pmode
, sp_reg_rtx
));
13811 else if (info
->push_p
)
13813 if (DEFAULT_ABI
== ABI_V4
13814 || current_function_calls_eh_return
)
13815 sp_offset
= info
->total_size
;
13818 emit_insn (TARGET_32BIT
13819 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
13820 GEN_INT (info
->total_size
))
13821 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
13822 GEN_INT (info
->total_size
)));
13826 /* Restore AltiVec registers if needed. */
13827 if (TARGET_ALTIVEC_ABI
&& info
->altivec_size
!= 0)
13831 for (i
= info
->first_altivec_reg_save
; i
<= LAST_ALTIVEC_REGNO
; ++i
)
13832 if (info
->vrsave_mask
& ALTIVEC_REG_BIT (i
))
13834 rtx addr
, areg
, mem
;
13836 areg
= gen_rtx_REG (Pmode
, 0);
13838 (areg
, GEN_INT (info
->altivec_save_offset
13840 + 16 * (i
- info
->first_altivec_reg_save
)));
13842 /* AltiVec addressing mode is [reg+reg]. */
13843 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, areg
);
13844 mem
= gen_rtx_MEM (V4SImode
, addr
);
13845 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13847 emit_move_insn (gen_rtx_REG (V4SImode
, i
), mem
);
13851 /* Restore VRSAVE if needed. */
13852 if (TARGET_ALTIVEC
&& TARGET_ALTIVEC_VRSAVE
13853 && info
->vrsave_mask
!= 0)
13855 rtx addr
, mem
, reg
;
13857 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13858 GEN_INT (info
->vrsave_save_offset
+ sp_offset
));
13859 mem
= gen_rtx_MEM (SImode
, addr
);
13860 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13861 reg
= gen_rtx_REG (SImode
, 12);
13862 emit_move_insn (reg
, mem
);
13864 emit_insn (generate_set_vrsave (reg
, info
, 1));
13867 /* Get the old lr if we saved it. */
13868 if (info
->lr_save_p
)
13870 rtx mem
= gen_frame_mem_offset (Pmode
, frame_reg_rtx
,
13871 info
->lr_save_offset
+ sp_offset
);
13873 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13875 emit_move_insn (gen_rtx_REG (Pmode
, 0), mem
);
13878 /* Get the old cr if we saved it. */
13879 if (info
->cr_save_p
)
13881 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13882 GEN_INT (info
->cr_save_offset
+ sp_offset
));
13883 rtx mem
= gen_rtx_MEM (SImode
, addr
);
13885 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13887 emit_move_insn (gen_rtx_REG (SImode
, 12), mem
);
13890 /* Set LR here to try to overlap restores below. */
13891 if (info
->lr_save_p
)
13892 emit_move_insn (gen_rtx_REG (Pmode
, LINK_REGISTER_REGNUM
),
13893 gen_rtx_REG (Pmode
, 0));
13895 /* Load exception handler data registers, if needed. */
13896 if (current_function_calls_eh_return
)
13898 unsigned int i
, regno
;
13902 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13903 GEN_INT (sp_offset
+ 5 * reg_size
));
13904 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
13906 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13908 emit_move_insn (gen_rtx_REG (reg_mode
, 2), mem
);
13915 regno
= EH_RETURN_DATA_REGNO (i
);
13916 if (regno
== INVALID_REGNUM
)
13919 mem
= gen_frame_mem_offset (reg_mode
, frame_reg_rtx
,
13920 info
->ehrd_offset
+ sp_offset
13921 + reg_size
* (int) i
);
13922 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13924 emit_move_insn (gen_rtx_REG (reg_mode
, regno
), mem
);
13928 /* Restore GPRs. This is done as a PARALLEL if we are using
13929 the load-multiple instructions. */
13930 if (using_load_multiple
)
13933 p
= rtvec_alloc (32 - info
->first_gp_reg_save
);
13934 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
13936 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13937 GEN_INT (info
->gp_save_offset
13940 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
13942 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13945 gen_rtx_SET (VOIDmode
,
13946 gen_rtx_REG (reg_mode
, info
->first_gp_reg_save
+ i
),
13949 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
13952 for (i
= 0; i
< 32 - info
->first_gp_reg_save
; i
++)
13953 if ((regs_ever_live
[info
->first_gp_reg_save
+i
]
13954 && (! call_used_regs
[info
->first_gp_reg_save
+i
]
13955 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
13956 && TARGET_TOC
&& TARGET_MINIMAL_TOC
)))
13957 || (i
+info
->first_gp_reg_save
== RS6000_PIC_OFFSET_TABLE_REGNUM
13958 && ((DEFAULT_ABI
== ABI_V4
&& flag_pic
!= 0)
13959 || (DEFAULT_ABI
== ABI_DARWIN
&& flag_pic
))))
13961 rtx addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13962 GEN_INT (info
->gp_save_offset
13965 rtx mem
= gen_rtx_MEM (reg_mode
, addr
);
13967 /* Restore 64-bit quantities for SPE. */
13968 if (TARGET_SPE_ABI
&& info
->spe_64bit_regs_used
!= 0)
13970 int offset
= info
->spe_gp_save_offset
+ sp_offset
+ 8 * i
;
13973 if (!SPE_CONST_OFFSET_OK (offset
))
13975 b
= gen_rtx_REG (Pmode
, FIXED_SCRATCH
);
13976 emit_move_insn (b
, GEN_INT (offset
));
13979 b
= GEN_INT (offset
);
13981 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
, b
);
13982 mem
= gen_rtx_MEM (V2SImode
, addr
);
13985 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
13987 emit_move_insn (gen_rtx_REG (reg_mode
,
13988 info
->first_gp_reg_save
+ i
), mem
);
13991 /* Restore fpr's if we need to do it without calling a function. */
13992 if (restoring_FPRs_inline
)
13993 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
13994 if ((regs_ever_live
[info
->first_fp_reg_save
+i
]
13995 && ! call_used_regs
[info
->first_fp_reg_save
+i
]))
13998 addr
= gen_rtx_PLUS (Pmode
, frame_reg_rtx
,
13999 GEN_INT (info
->fp_save_offset
14002 mem
= gen_rtx_MEM (DFmode
, addr
);
14003 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
14005 emit_move_insn (gen_rtx_REG (DFmode
,
14006 info
->first_fp_reg_save
+ i
),
14010 /* If we saved cr, restore it here. Just those that were used. */
14011 if (info
->cr_save_p
)
14013 rtx r12_rtx
= gen_rtx_REG (SImode
, 12);
14016 if (using_mfcr_multiple
)
14018 for (i
= 0; i
< 8; i
++)
14019 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
14025 if (using_mfcr_multiple
&& count
> 1)
14030 p
= rtvec_alloc (count
);
14033 for (i
= 0; i
< 8; i
++)
14034 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
14036 rtvec r
= rtvec_alloc (2);
14037 RTVEC_ELT (r
, 0) = r12_rtx
;
14038 RTVEC_ELT (r
, 1) = GEN_INT (1 << (7-i
));
14039 RTVEC_ELT (p
, ndx
) =
14040 gen_rtx_SET (VOIDmode
, gen_rtx_REG (CCmode
, CR0_REGNO
+i
),
14041 gen_rtx_UNSPEC (CCmode
, r
, UNSPEC_MOVESI_TO_CR
));
14044 emit_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
14049 for (i
= 0; i
< 8; i
++)
14050 if (regs_ever_live
[CR0_REGNO
+i
] && ! call_used_regs
[CR0_REGNO
+i
])
14052 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode
,
14058 /* If this is V.4, unwind the stack pointer after all of the loads
14059 have been done. We need to emit a block here so that sched
14060 doesn't decide to move the sp change before the register restores
14061 (which may not have any obvious dependency on the stack). This
14062 doesn't hurt performance, because there is no scheduling that can
14063 be done after this point. */
14064 if (DEFAULT_ABI
== ABI_V4
14065 || current_function_calls_eh_return
)
14067 if (frame_reg_rtx
!= sp_reg_rtx
)
14068 rs6000_emit_stack_tie ();
14070 if (use_backchain_to_restore_sp
)
14072 emit_move_insn (sp_reg_rtx
, frame_reg_rtx
);
14074 else if (sp_offset
!= 0)
14076 emit_insn (TARGET_32BIT
14077 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
,
14078 GEN_INT (sp_offset
))
14079 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
,
14080 GEN_INT (sp_offset
)));
14084 if (current_function_calls_eh_return
)
14086 rtx sa
= EH_RETURN_STACKADJ_RTX
;
14087 emit_insn (TARGET_32BIT
14088 ? gen_addsi3 (sp_reg_rtx
, sp_reg_rtx
, sa
)
14089 : gen_adddi3 (sp_reg_rtx
, sp_reg_rtx
, sa
));
14095 if (! restoring_FPRs_inline
)
14096 p
= rtvec_alloc (3 + 64 - info
->first_fp_reg_save
);
14098 p
= rtvec_alloc (2);
14100 RTVEC_ELT (p
, 0) = gen_rtx_RETURN (VOIDmode
);
14101 RTVEC_ELT (p
, 1) = gen_rtx_USE (VOIDmode
,
14102 gen_rtx_REG (Pmode
,
14103 LINK_REGISTER_REGNUM
));
14105 /* If we have to restore more than two FP registers, branch to the
14106 restore function. It will return to our caller. */
14107 if (! restoring_FPRs_inline
)
14111 const char *alloc_rname
;
14113 sprintf (rname
, "%s%d%s", RESTORE_FP_PREFIX
,
14114 info
->first_fp_reg_save
- 32, RESTORE_FP_SUFFIX
);
14115 alloc_rname
= ggc_strdup (rname
);
14116 RTVEC_ELT (p
, 2) = gen_rtx_USE (VOIDmode
,
14117 gen_rtx_SYMBOL_REF (Pmode
,
14120 for (i
= 0; i
< 64 - info
->first_fp_reg_save
; i
++)
14123 addr
= gen_rtx_PLUS (Pmode
, sp_reg_rtx
,
14124 GEN_INT (info
->fp_save_offset
+ 8*i
));
14125 mem
= gen_rtx_MEM (DFmode
, addr
);
14126 set_mem_alias_set (mem
, rs6000_sr_alias_set
);
14128 RTVEC_ELT (p
, i
+3) =
14129 gen_rtx_SET (VOIDmode
,
14130 gen_rtx_REG (DFmode
, info
->first_fp_reg_save
+ i
),
14135 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode
, p
));
14139 /* Write function epilogue. */
14142 rs6000_output_function_epilogue (FILE *file
,
14143 HOST_WIDE_INT size ATTRIBUTE_UNUSED
)
14145 rs6000_stack_t
*info
= rs6000_stack_info ();
14147 if (! HAVE_epilogue
)
14149 rtx insn
= get_last_insn ();
14150 /* If the last insn was a BARRIER, we don't have to write anything except
14151 the trace table. */
14152 if (GET_CODE (insn
) == NOTE
)
14153 insn
= prev_nonnote_insn (insn
);
14154 if (insn
== 0 || GET_CODE (insn
) != BARRIER
)
14156 /* This is slightly ugly, but at least we don't have two
14157 copies of the epilogue-emitting code. */
14160 /* A NOTE_INSN_DELETED is supposed to be at the start
14161 and end of the "toplevel" insn chain. */
14162 emit_note (NOTE_INSN_DELETED
);
14163 rs6000_emit_epilogue (FALSE
);
14164 emit_note (NOTE_INSN_DELETED
);
14166 /* Expand INSN_ADDRESSES so final() doesn't crash. */
14170 for (insn
= get_insns (); insn
!= 0; insn
= NEXT_INSN (insn
))
14172 INSN_ADDRESSES_NEW (insn
, addr
);
14177 if (TARGET_DEBUG_STACK
)
14178 debug_rtx_list (get_insns (), 100);
14179 final (get_insns (), file
, FALSE
, FALSE
);
14185 macho_branch_islands ();
14186 /* Mach-O doesn't support labels at the end of objects, so if
14187 it looks like we might want one, insert a NOP. */
14189 rtx insn
= get_last_insn ();
14192 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_DELETED_LABEL
)
14193 insn
= PREV_INSN (insn
);
14197 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_DELETED_LABEL
)))
14198 fputs ("\tnop\n", file
);
14202 /* Output a traceback table here. See /usr/include/sys/debug.h for info
14205 We don't output a traceback table if -finhibit-size-directive was
14206 used. The documentation for -finhibit-size-directive reads
14207 ``don't output a @code{.size} assembler directive, or anything
14208 else that would cause trouble if the function is split in the
14209 middle, and the two halves are placed at locations far apart in
14210 memory.'' The traceback table has this property, since it
14211 includes the offset from the start of the function to the
14212 traceback table itself.
14214 System V.4 Powerpc's (and the embedded ABI derived from it) use a
14215 different traceback table. */
14216 if (DEFAULT_ABI
== ABI_AIX
&& ! flag_inhibit_size_directive
14217 && rs6000_traceback
!= traceback_none
)
14219 const char *fname
= NULL
;
14220 const char *language_string
= lang_hooks
.name
;
14221 int fixed_parms
= 0, float_parms
= 0, parm_info
= 0;
14223 int optional_tbtab
;
14225 if (rs6000_traceback
== traceback_full
)
14226 optional_tbtab
= 1;
14227 else if (rs6000_traceback
== traceback_part
)
14228 optional_tbtab
= 0;
14230 optional_tbtab
= !optimize_size
&& !TARGET_ELF
;
14232 if (optional_tbtab
)
14234 fname
= XSTR (XEXP (DECL_RTL (current_function_decl
), 0), 0);
14235 while (*fname
== '.') /* V.4 encodes . in the name */
14238 /* Need label immediately before tbtab, so we can compute
14239 its offset from the function start. */
14240 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
14241 ASM_OUTPUT_LABEL (file
, fname
);
14244 /* The .tbtab pseudo-op can only be used for the first eight
14245 expressions, since it can't handle the possibly variable
14246 length fields that follow. However, if you omit the optional
14247 fields, the assembler outputs zeros for all optional fields
14248 anyways, giving each variable length field is minimum length
14249 (as defined in sys/debug.h). Thus we can not use the .tbtab
14250 pseudo-op at all. */
14252 /* An all-zero word flags the start of the tbtab, for debuggers
14253 that have to find it by searching forward from the entry
14254 point or from the current pc. */
14255 fputs ("\t.long 0\n", file
);
14257 /* Tbtab format type. Use format type 0. */
14258 fputs ("\t.byte 0,", file
);
14260 /* Language type. Unfortunately, there does not seem to be any
14261 official way to discover the language being compiled, so we
14262 use language_string.
14263 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
14264 Java is 13. Objective-C is 14. */
14265 if (! strcmp (language_string
, "GNU C"))
14267 else if (! strcmp (language_string
, "GNU F77")
14268 || ! strcmp (language_string
, "GNU F95"))
14270 else if (! strcmp (language_string
, "GNU Pascal"))
14272 else if (! strcmp (language_string
, "GNU Ada"))
14274 else if (! strcmp (language_string
, "GNU C++"))
14276 else if (! strcmp (language_string
, "GNU Java"))
14278 else if (! strcmp (language_string
, "GNU Objective-C"))
14282 fprintf (file
, "%d,", i
);
14284 /* 8 single bit fields: global linkage (not set for C extern linkage,
14285 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
14286 from start of procedure stored in tbtab, internal function, function
14287 has controlled storage, function has no toc, function uses fp,
14288 function logs/aborts fp operations. */
14289 /* Assume that fp operations are used if any fp reg must be saved. */
14290 fprintf (file
, "%d,",
14291 (optional_tbtab
<< 5) | ((info
->first_fp_reg_save
!= 64) << 1));
14293 /* 6 bitfields: function is interrupt handler, name present in
14294 proc table, function calls alloca, on condition directives
14295 (controls stack walks, 3 bits), saves condition reg, saves
14297 /* The `function calls alloca' bit seems to be set whenever reg 31 is
14298 set up as a frame pointer, even when there is no alloca call. */
14299 fprintf (file
, "%d,",
14300 ((optional_tbtab
<< 6)
14301 | ((optional_tbtab
& frame_pointer_needed
) << 5)
14302 | (info
->cr_save_p
<< 1)
14303 | (info
->lr_save_p
)));
14305 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
14307 fprintf (file
, "%d,",
14308 (info
->push_p
<< 7) | (64 - info
->first_fp_reg_save
));
14310 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
14311 fprintf (file
, "%d,", (32 - first_reg_to_save ()));
14313 if (optional_tbtab
)
14315 /* Compute the parameter info from the function decl argument
14318 int next_parm_info_bit
= 31;
14320 for (decl
= DECL_ARGUMENTS (current_function_decl
);
14321 decl
; decl
= TREE_CHAIN (decl
))
14323 rtx parameter
= DECL_INCOMING_RTL (decl
);
14324 enum machine_mode mode
= GET_MODE (parameter
);
14326 if (GET_CODE (parameter
) == REG
)
14328 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
14334 if (mode
== SFmode
)
14336 else if (mode
== DFmode
|| mode
== TFmode
)
14341 /* If only one bit will fit, don't or in this entry. */
14342 if (next_parm_info_bit
> 0)
14343 parm_info
|= (bits
<< (next_parm_info_bit
- 1));
14344 next_parm_info_bit
-= 2;
14348 fixed_parms
+= ((GET_MODE_SIZE (mode
)
14349 + (UNITS_PER_WORD
- 1))
14351 next_parm_info_bit
-= 1;
14357 /* Number of fixed point parameters. */
14358 /* This is actually the number of words of fixed point parameters; thus
14359 an 8 byte struct counts as 2; and thus the maximum value is 8. */
14360 fprintf (file
, "%d,", fixed_parms
);
14362 /* 2 bitfields: number of floating point parameters (7 bits), parameters
14364 /* This is actually the number of fp registers that hold parameters;
14365 and thus the maximum value is 13. */
14366 /* Set parameters on stack bit if parameters are not in their original
14367 registers, regardless of whether they are on the stack? Xlc
14368 seems to set the bit when not optimizing. */
14369 fprintf (file
, "%d\n", ((float_parms
<< 1) | (! optimize
)));
14371 if (! optional_tbtab
)
14374 /* Optional fields follow. Some are variable length. */
14376 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
14377 11 double float. */
14378 /* There is an entry for each parameter in a register, in the order that
14379 they occur in the parameter list. Any intervening arguments on the
14380 stack are ignored. If the list overflows a long (max possible length
14381 34 bits) then completely leave off all elements that don't fit. */
14382 /* Only emit this long if there was at least one parameter. */
14383 if (fixed_parms
|| float_parms
)
14384 fprintf (file
, "\t.long %d\n", parm_info
);
14386 /* Offset from start of code to tb table. */
14387 fputs ("\t.long ", file
);
14388 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LT");
14390 RS6000_OUTPUT_BASENAME (file
, fname
);
14392 assemble_name (file
, fname
);
14394 rs6000_output_function_entry (file
, fname
);
14397 /* Interrupt handler mask. */
14398 /* Omit this long, since we never set the interrupt handler bit
14401 /* Number of CTL (controlled storage) anchors. */
14402 /* Omit this long, since the has_ctl bit is never set above. */
14404 /* Displacement into stack of each CTL anchor. */
14405 /* Omit this list of longs, because there are no CTL anchors. */
14407 /* Length of function name. */
14410 fprintf (file
, "\t.short %d\n", (int) strlen (fname
));
14412 /* Function name. */
14413 assemble_string (fname
, strlen (fname
));
14415 /* Register for alloca automatic storage; this is always reg 31.
14416 Only emit this if the alloca bit was set above. */
14417 if (frame_pointer_needed
)
14418 fputs ("\t.byte 31\n", file
);
14420 fputs ("\t.align 2\n", file
);
14424 /* A C compound statement that outputs the assembler code for a thunk
14425 function, used to implement C++ virtual function calls with
14426 multiple inheritance. The thunk acts as a wrapper around a virtual
14427 function, adjusting the implicit object parameter before handing
14428 control off to the real function.
14430 First, emit code to add the integer DELTA to the location that
14431 contains the incoming first argument. Assume that this argument
14432 contains a pointer, and is the one used to pass the `this' pointer
14433 in C++. This is the incoming argument *before* the function
14434 prologue, e.g. `%o0' on a sparc. The addition must preserve the
14435 values of all other incoming arguments.
14437 After the addition, emit code to jump to FUNCTION, which is a
14438 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
14439 not touch the return address. Hence returning from FUNCTION will
14440 return to whoever called the current `thunk'.
14442 The effect must be as if FUNCTION had been called directly with the
14443 adjusted first argument. This macro is responsible for emitting
14444 all of the code for a thunk function; output_function_prologue()
14445 and output_function_epilogue() are not invoked.
14447 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
14448 been extracted from it.) It might possibly be useful on some
14449 targets, but probably not.
14451 If you do not define this macro, the target-independent code in the
14452 C++ frontend will generate a less efficient heavyweight thunk that
14453 calls FUNCTION instead of jumping to it. The generic approach does
14454 not support varargs. */
14457 rs6000_output_mi_thunk (FILE *file
, tree thunk_fndecl ATTRIBUTE_UNUSED
,
14458 HOST_WIDE_INT delta
, HOST_WIDE_INT vcall_offset
,
14461 rtx
this, insn
, funexp
;
14463 reload_completed
= 1;
14464 epilogue_completed
= 1;
14465 no_new_pseudos
= 1;
14466 reset_block_changes ();
14468 /* Mark the end of the (empty) prologue. */
14469 emit_note (NOTE_INSN_PROLOGUE_END
);
14471 /* Find the "this" pointer. If the function returns a structure,
14472 the structure return pointer is in r3. */
14473 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function
)), function
))
14474 this = gen_rtx_REG (Pmode
, 4);
14476 this = gen_rtx_REG (Pmode
, 3);
14478 /* Apply the constant offset, if required. */
14481 rtx delta_rtx
= GEN_INT (delta
);
14482 emit_insn (TARGET_32BIT
14483 ? gen_addsi3 (this, this, delta_rtx
)
14484 : gen_adddi3 (this, this, delta_rtx
));
14487 /* Apply the offset from the vtable, if required. */
14490 rtx vcall_offset_rtx
= GEN_INT (vcall_offset
);
14491 rtx tmp
= gen_rtx_REG (Pmode
, 12);
14493 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, this));
14494 if (((unsigned HOST_WIDE_INT
) vcall_offset
) + 0x8000 >= 0x10000)
14496 emit_insn (TARGET_32BIT
14497 ? gen_addsi3 (tmp
, tmp
, vcall_offset_rtx
)
14498 : gen_adddi3 (tmp
, tmp
, vcall_offset_rtx
));
14499 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, tmp
));
14503 rtx loc
= gen_rtx_PLUS (Pmode
, tmp
, vcall_offset_rtx
);
14505 emit_move_insn (tmp
, gen_rtx_MEM (Pmode
, loc
));
14507 emit_insn (TARGET_32BIT
14508 ? gen_addsi3 (this, this, tmp
)
14509 : gen_adddi3 (this, this, tmp
));
14512 /* Generate a tail call to the target function. */
14513 if (!TREE_USED (function
))
14515 assemble_external (function
);
14516 TREE_USED (function
) = 1;
14518 funexp
= XEXP (DECL_RTL (function
), 0);
14519 funexp
= gen_rtx_MEM (FUNCTION_MODE
, funexp
);
14522 if (MACHOPIC_INDIRECT
)
14523 funexp
= machopic_indirect_call_target (funexp
);
14526 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
14527 generate sibcall RTL explicitly to avoid constraint abort. */
14528 insn
= emit_call_insn (
14529 gen_rtx_PARALLEL (VOIDmode
,
14531 gen_rtx_CALL (VOIDmode
,
14532 funexp
, const0_rtx
),
14533 gen_rtx_USE (VOIDmode
, const0_rtx
),
14534 gen_rtx_USE (VOIDmode
,
14535 gen_rtx_REG (SImode
,
14536 LINK_REGISTER_REGNUM
)),
14537 gen_rtx_RETURN (VOIDmode
))));
14538 SIBLING_CALL_P (insn
) = 1;
14541 /* Run just enough of rest_of_compilation to get the insns emitted.
14542 There's not really enough bulk here to make other passes such as
14543 instruction scheduling worth while. Note that use_thunk calls
14544 assemble_start_function and assemble_end_function. */
14545 insn
= get_insns ();
14546 insn_locators_initialize ();
14547 shorten_branches (insn
);
14548 final_start_function (insn
, file
, 1);
14549 final (insn
, file
, 1, 0);
14550 final_end_function ();
14552 reload_completed
= 0;
14553 epilogue_completed
= 0;
14554 no_new_pseudos
= 0;
14557 /* A quick summary of the various types of 'constant-pool tables'
14560 Target Flags Name One table per
14561 AIX (none) AIX TOC object file
14562 AIX -mfull-toc AIX TOC object file
14563 AIX -mminimal-toc AIX minimal TOC translation unit
14564 SVR4/EABI (none) SVR4 SDATA object file
14565 SVR4/EABI -fpic SVR4 pic object file
14566 SVR4/EABI -fPIC SVR4 PIC translation unit
14567 SVR4/EABI -mrelocatable EABI TOC function
14568 SVR4/EABI -maix AIX TOC object file
14569 SVR4/EABI -maix -mminimal-toc
14570 AIX minimal TOC translation unit
14572 Name Reg. Set by entries contains:
14573 made by addrs? fp? sum?
14575 AIX TOC 2 crt0 as Y option option
14576 AIX minimal TOC 30 prolog gcc Y Y option
14577 SVR4 SDATA 13 crt0 gcc N Y N
14578 SVR4 pic 30 prolog ld Y not yet N
14579 SVR4 PIC 30 prolog gcc Y option option
14580 EABI TOC 30 prolog gcc Y option option
14584 /* Hash functions for the hash table. */
14587 rs6000_hash_constant (rtx k
)
14589 enum rtx_code code
= GET_CODE (k
);
14590 enum machine_mode mode
= GET_MODE (k
);
14591 unsigned result
= (code
<< 3) ^ mode
;
14592 const char *format
;
14595 format
= GET_RTX_FORMAT (code
);
14596 flen
= strlen (format
);
14602 return result
* 1231 + (unsigned) INSN_UID (XEXP (k
, 0));
14605 if (mode
!= VOIDmode
)
14606 return real_hash (CONST_DOUBLE_REAL_VALUE (k
)) * result
;
14618 for (; fidx
< flen
; fidx
++)
14619 switch (format
[fidx
])
14624 const char *str
= XSTR (k
, fidx
);
14625 len
= strlen (str
);
14626 result
= result
* 613 + len
;
14627 for (i
= 0; i
< len
; i
++)
14628 result
= result
* 613 + (unsigned) str
[i
];
14633 result
= result
* 1231 + rs6000_hash_constant (XEXP (k
, fidx
));
14637 result
= result
* 613 + (unsigned) XINT (k
, fidx
);
14640 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT
))
14641 result
= result
* 613 + (unsigned) XWINT (k
, fidx
);
14645 for (i
= 0; i
< sizeof(HOST_WIDE_INT
)/sizeof(unsigned); i
++)
14646 result
= result
* 613 + (unsigned) (XWINT (k
, fidx
)
14660 toc_hash_function (const void *hash_entry
)
14662 const struct toc_hash_struct
*thc
=
14663 (const struct toc_hash_struct
*) hash_entry
;
14664 return rs6000_hash_constant (thc
->key
) ^ thc
->key_mode
;
14667 /* Compare H1 and H2 for equivalence. */
14670 toc_hash_eq (const void *h1
, const void *h2
)
14672 rtx r1
= ((const struct toc_hash_struct
*) h1
)->key
;
14673 rtx r2
= ((const struct toc_hash_struct
*) h2
)->key
;
14675 if (((const struct toc_hash_struct
*) h1
)->key_mode
14676 != ((const struct toc_hash_struct
*) h2
)->key_mode
)
14679 return rtx_equal_p (r1
, r2
);
/* These are the names given by the C++ front-end to vtables, and
   vtable-like objects.  Ideally, this logic should not be here;
   instead, there should be some programmatic way of inquiring as
   to whether or not an object is a vtable.

   Fixed: the macro previously ignored its NAME parameter and relied
   on a local variable literally called `name' being in scope at every
   use site.  It now uses (NAME) like a normal function-like macro;
   all existing callers pass `name', so behavior is unchanged.  */

#define VTABLE_NAME_P(NAME)				\
  (strncmp ("_vt.", (NAME), strlen ("_vt.")) == 0	\
   || strncmp ("_ZTV", (NAME), strlen ("_ZTV")) == 0	\
   || strncmp ("_ZTT", (NAME), strlen ("_ZTT")) == 0	\
   || strncmp ("_ZTI", (NAME), strlen ("_ZTI")) == 0	\
   || strncmp ("_ZTC", (NAME), strlen ("_ZTC")) == 0)
14695 rs6000_output_symbol_ref (FILE *file
, rtx x
)
14697 /* Currently C++ toc references to vtables can be emitted before it
14698 is decided whether the vtable is public or private. If this is
14699 the case, then the linker will eventually complain that there is
14700 a reference to an unknown section. Thus, for vtables only,
14701 we emit the TOC reference to reference the symbol and not the
14703 const char *name
= XSTR (x
, 0);
14705 if (VTABLE_NAME_P (name
))
14707 RS6000_OUTPUT_BASENAME (file
, name
);
14710 assemble_name (file
, name
);
14713 /* Output a TOC entry. We derive the entry name from what is being
14717 output_toc (FILE *file
, rtx x
, int labelno
, enum machine_mode mode
)
14720 const char *name
= buf
;
14721 const char *real_name
;
14728 /* When the linker won't eliminate them, don't output duplicate
14729 TOC entries (this happens on AIX if there is any kind of TOC,
14730 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
14732 if (TARGET_TOC
&& GET_CODE (x
) != LABEL_REF
)
14734 struct toc_hash_struct
*h
;
14737 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
14738 time because GGC is not initialized at that point. */
14739 if (toc_hash_table
== NULL
)
14740 toc_hash_table
= htab_create_ggc (1021, toc_hash_function
,
14741 toc_hash_eq
, NULL
);
14743 h
= ggc_alloc (sizeof (*h
));
14745 h
->key_mode
= mode
;
14746 h
->labelno
= labelno
;
14748 found
= htab_find_slot (toc_hash_table
, h
, 1);
14749 if (*found
== NULL
)
14751 else /* This is indeed a duplicate.
14752 Set this label equal to that label. */
14754 fputs ("\t.set ", file
);
14755 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
14756 fprintf (file
, "%d,", labelno
);
14757 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file
, "LC");
14758 fprintf (file
, "%d\n", ((*(const struct toc_hash_struct
**)
14764 /* If we're going to put a double constant in the TOC, make sure it's
14765 aligned properly when strict alignment is on. */
14766 if (GET_CODE (x
) == CONST_DOUBLE
14767 && STRICT_ALIGNMENT
14768 && GET_MODE_BITSIZE (mode
) >= 64
14769 && ! (TARGET_NO_FP_IN_TOC
&& ! TARGET_MINIMAL_TOC
)) {
14770 ASM_OUTPUT_ALIGN (file
, 3);
14773 (*targetm
.asm_out
.internal_label
) (file
, "LC", labelno
);
14775 /* Handle FP constants specially. Note that if we have a minimal
14776 TOC, things we put here aren't actually in the TOC, so we can allow
14778 if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == TFmode
)
14780 REAL_VALUE_TYPE rv
;
14783 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
14784 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv
, k
);
14788 if (TARGET_MINIMAL_TOC
)
14789 fputs (DOUBLE_INT_ASM_OP
, file
);
14791 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
14792 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
14793 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
14794 fprintf (file
, "0x%lx%08lx,0x%lx%08lx\n",
14795 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
14796 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
14801 if (TARGET_MINIMAL_TOC
)
14802 fputs ("\t.long ", file
);
14804 fprintf (file
, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
14805 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
14806 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
14807 fprintf (file
, "0x%lx,0x%lx,0x%lx,0x%lx\n",
14808 k
[0] & 0xffffffff, k
[1] & 0xffffffff,
14809 k
[2] & 0xffffffff, k
[3] & 0xffffffff);
14813 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == DFmode
)
14815 REAL_VALUE_TYPE rv
;
14818 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
14819 REAL_VALUE_TO_TARGET_DOUBLE (rv
, k
);
14823 if (TARGET_MINIMAL_TOC
)
14824 fputs (DOUBLE_INT_ASM_OP
, file
);
14826 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
14827 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
14828 fprintf (file
, "0x%lx%08lx\n",
14829 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
14834 if (TARGET_MINIMAL_TOC
)
14835 fputs ("\t.long ", file
);
14837 fprintf (file
, "\t.tc FD_%lx_%lx[TC],",
14838 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
14839 fprintf (file
, "0x%lx,0x%lx\n",
14840 k
[0] & 0xffffffff, k
[1] & 0xffffffff);
14844 else if (GET_CODE (x
) == CONST_DOUBLE
&& GET_MODE (x
) == SFmode
)
14846 REAL_VALUE_TYPE rv
;
14849 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
14850 REAL_VALUE_TO_TARGET_SINGLE (rv
, l
);
14854 if (TARGET_MINIMAL_TOC
)
14855 fputs (DOUBLE_INT_ASM_OP
, file
);
14857 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
14858 fprintf (file
, "0x%lx00000000\n", l
& 0xffffffff);
14863 if (TARGET_MINIMAL_TOC
)
14864 fputs ("\t.long ", file
);
14866 fprintf (file
, "\t.tc FS_%lx[TC],", l
& 0xffffffff);
14867 fprintf (file
, "0x%lx\n", l
& 0xffffffff);
14871 else if (GET_MODE (x
) == VOIDmode
14872 && (GET_CODE (x
) == CONST_INT
|| GET_CODE (x
) == CONST_DOUBLE
))
14874 unsigned HOST_WIDE_INT low
;
14875 HOST_WIDE_INT high
;
14877 if (GET_CODE (x
) == CONST_DOUBLE
)
14879 low
= CONST_DOUBLE_LOW (x
);
14880 high
= CONST_DOUBLE_HIGH (x
);
14883 #if HOST_BITS_PER_WIDE_INT == 32
14886 high
= (low
& 0x80000000) ? ~0 : 0;
14890 low
= INTVAL (x
) & 0xffffffff;
14891 high
= (HOST_WIDE_INT
) INTVAL (x
) >> 32;
14895 /* TOC entries are always Pmode-sized, but since this
14896 is a bigendian machine then if we're putting smaller
14897 integer constants in the TOC we have to pad them.
14898 (This is still a win over putting the constants in
14899 a separate constant pool, because then we'd have
14900 to have both a TOC entry _and_ the actual constant.)
14902 For a 32-bit target, CONST_INT values are loaded and shifted
14903 entirely within `low' and can be stored in one TOC entry. */
14905 if (TARGET_64BIT
&& POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
14906 abort ();/* It would be easy to make this work, but it doesn't now. */
14908 if (POINTER_SIZE
> GET_MODE_BITSIZE (mode
))
14910 #if HOST_BITS_PER_WIDE_INT == 32
14911 lshift_double (low
, high
, POINTER_SIZE
- GET_MODE_BITSIZE (mode
),
14912 POINTER_SIZE
, &low
, &high
, 0);
14915 low
<<= POINTER_SIZE
- GET_MODE_BITSIZE (mode
);
14916 high
= (HOST_WIDE_INT
) low
>> 32;
14923 if (TARGET_MINIMAL_TOC
)
14924 fputs (DOUBLE_INT_ASM_OP
, file
);
14926 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
14927 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
14928 fprintf (file
, "0x%lx%08lx\n",
14929 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
14934 if (POINTER_SIZE
< GET_MODE_BITSIZE (mode
))
14936 if (TARGET_MINIMAL_TOC
)
14937 fputs ("\t.long ", file
);
14939 fprintf (file
, "\t.tc ID_%lx_%lx[TC],",
14940 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
14941 fprintf (file
, "0x%lx,0x%lx\n",
14942 (long) high
& 0xffffffff, (long) low
& 0xffffffff);
14946 if (TARGET_MINIMAL_TOC
)
14947 fputs ("\t.long ", file
);
14949 fprintf (file
, "\t.tc IS_%lx[TC],", (long) low
& 0xffffffff);
14950 fprintf (file
, "0x%lx\n", (long) low
& 0xffffffff);
14956 if (GET_CODE (x
) == CONST
)
14958 if (GET_CODE (XEXP (x
, 0)) != PLUS
)
14961 base
= XEXP (XEXP (x
, 0), 0);
14962 offset
= INTVAL (XEXP (XEXP (x
, 0), 1));
14965 if (GET_CODE (base
) == SYMBOL_REF
)
14966 name
= XSTR (base
, 0);
14967 else if (GET_CODE (base
) == LABEL_REF
)
14968 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (XEXP (base
, 0)));
14969 else if (GET_CODE (base
) == CODE_LABEL
)
14970 ASM_GENERATE_INTERNAL_LABEL (buf
, "L", CODE_LABEL_NUMBER (base
));
14974 real_name
= (*targetm
.strip_name_encoding
) (name
);
14975 if (TARGET_MINIMAL_TOC
)
14976 fputs (TARGET_32BIT
? "\t.long " : DOUBLE_INT_ASM_OP
, file
);
14979 fprintf (file
, "\t.tc %s", real_name
);
14982 fprintf (file
, ".N%d", - offset
);
14984 fprintf (file
, ".P%d", offset
);
14986 fputs ("[TC],", file
);
14989 /* Currently C++ toc references to vtables can be emitted before it
14990 is decided whether the vtable is public or private. If this is
14991 the case, then the linker will eventually complain that there is
14992 a TOC reference to an unknown section. Thus, for vtables only,
14993 we emit the TOC reference to reference the symbol and not the
14995 if (VTABLE_NAME_P (name
))
14997 RS6000_OUTPUT_BASENAME (file
, name
);
14999 fprintf (file
, "%d", offset
);
15000 else if (offset
> 0)
15001 fprintf (file
, "+%d", offset
);
15004 output_addr_const (file
, x
);
/* Output an assembler pseudo-op to write an ASCII string of N characters
   starting at P to FILE.

   On the RS/6000, we have to do this using the .byte operation and
   write out special characters outside the quoted string.
   Also, the assembler is broken; very long strings are truncated,
   so we must artificially break them up early (at 512 characters).

   Printable characters are accumulated inside one `.byte "..."` quoted
   run; non-printable characters are emitted as decimal .byte values.
   The three string pointers below track what prefix (if any) must be
   emitted before the next quoted character, before the next decimal
   character, and what suffix closes a currently-open quoted run.  */

void
output_ascii (FILE *file, const char *p, int n)
{
  char c;
  int i, count_string;
  const char *for_string = "\t.byte \"";
  const char *for_decimal = "\t.byte ";
  const char *to_close = NULL;

  count_string = 0;
  for (i = 0; i < n; i++)
    {
      c = *p++;
      if (c >= ' ' && c < 0177)
	{
	  if (for_string)
	    fputs (for_string, file);
	  putc (c, file);

	  /* Write two quotes to get one.  */
	  if (c == '"')
	    {
	      putc (c, file);
	      ++count_string;
	    }

	  for_string = NULL;
	  for_decimal = "\"\n\t.byte ";
	  to_close = "\"\n";
	  ++count_string;

	  /* Break the quoted run before the assembler's length limit.  */
	  if (count_string >= 512)
	    {
	      fputs (to_close, file);

	      for_string = "\t.byte \"";
	      for_decimal = "\t.byte ";
	      to_close = NULL;
	      count_string = 0;
	    }
	}
      else
	{
	  if (for_decimal)
	    fputs (for_decimal, file);
	  fprintf (file, "%d", c);

	  for_string = "\n\t.byte \"";
	  for_decimal = ", ";
	  to_close = "\n";
	  count_string = 0;
	}
    }

  /* Now close the string if we have written one.  Then end the line.  */
  if (to_close)
    fputs (to_close, file);
}
/* Generate a unique section name for FILENAME for a section type
   represented by SECTION_DESC.  Output goes into BUF.

   SECTION_DESC can be any string, as long as it is different for each
   possible section type.

   We name the section in the same manner as xlc.  The name begins with an
   underscore followed by the filename (after stripping any leading directory
   names) with the last period replaced by the string SECTION_DESC.  If
   FILENAME does not contain a period, SECTION_DESC is appended to the end of
   the name.  Non-alphanumeric characters other than the replaced period are
   dropped.  *BUF is xmalloc'd; the caller owns and must free it.  */

void
rs6000_gen_section_name (char **buf, const char *filename,
			 const char *section_desc)
{
  const char *q, *after_last_slash, *last_period = 0;
  char *p;
  int len;

  /* Find the basename and the last period within it.  */
  after_last_slash = filename;
  for (q = filename; *q; q++)
    {
      if (*q == '/')
	after_last_slash = q + 1;
      else if (*q == '.')
	last_period = q;
    }

  len = strlen (after_last_slash) + strlen (section_desc) + 2;
  *buf = (char *) xmalloc (len);

  p = *buf;
  *p++ = '_';

  for (q = after_last_slash; *q; q++)
    {
      if (q == last_period)
	{
	  /* Replace the final period with SECTION_DESC.  */
	  strcpy (p, section_desc);
	  p += strlen (section_desc);
	  last_period = 0;
	}
      else if (ISALNUM (*q))
	*p++ = *q;
    }

  /* No period found: append SECTION_DESC; otherwise just terminate.  */
  if (last_period == 0)
    strcpy (p, section_desc);
  else
    *p = '\0';
}
15129 /* Emit profile function. */
/* NOTE(review): garbled extraction -- tokens split across lines; the
   declarations of `buf` and `fun`, the emit_library_call argument tail,
   and various braces are in elided lines.  Emits the mcount call for the
   AIX and Darwin ABIs; code left byte-identical. */
15132 output_profile_hook (int labelno ATTRIBUTE_UNUSED
)
15134 if (TARGET_PROFILE_KERNEL
)
15137 if (DEFAULT_ABI
== ABI_AIX
)
15139 #ifndef NO_PROFILE_COUNTERS
15140 # define NO_PROFILE_COUNTERS 0
15142 if (NO_PROFILE_COUNTERS
)
15143 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 0);
15147 const char *label_name
;
/* Build the "LPn" counter label and pass its address to mcount. */
15150 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
15151 label_name
= (*targetm
.strip_name_encoding
) (ggc_strdup (buf
));
15152 fun
= gen_rtx_SYMBOL_REF (Pmode
, label_name
);
15154 emit_library_call (init_one_libfunc (RS6000_MCOUNT
), 0, VOIDmode
, 1,
15158 else if (DEFAULT_ABI
== ABI_DARWIN
)
15160 const char *mcount_name
= RS6000_MCOUNT
;
15161 int caller_addr_regno
= LINK_REGISTER_REGNUM
;
15163 /* Be conservative and always set this, at least for now. */
15164 current_function_uses_pic_offset_table
= 1;
15167 /* For PIC code, set up a stub and collect the caller's address
15168 from r0, which is where the prologue puts it. */
15169 if (MACHOPIC_INDIRECT
15170 && current_function_uses_pic_offset_table
)
15171 caller_addr_regno
= 0;
15173 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, mcount_name
),
15175 gen_rtx_REG (Pmode
, caller_addr_regno
), Pmode
);
15179 /* Write function profiler code. */
/* NOTE(review): garbled extraction -- tokens split across lines; the
   `switch` case labels (ABI_V4 vs. AIX ABIs), the `buf`/`save_lr`
   declarations, and several braces live in elided lines.  Emits the
   textual mcount prologue for V.4 (three PIC flavors) and the AIX/kernel
   profiling sequence.  Code left byte-identical. */
15182 output_function_profiler (FILE *file
, int labelno
)
15187 switch (DEFAULT_ABI
)
15196 warning ("no profiling of 64-bit code for this ABI");
/* V.4, flag_pic == 1: address the counter via the GOT. */
15199 ASM_GENERATE_INTERNAL_LABEL (buf
, "LP", labelno
);
15200 fprintf (file
, "\tmflr %s\n", reg_names
[0]);
15203 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file
);
15204 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
15205 reg_names
[0], save_lr
, reg_names
[1]);
15206 asm_fprintf (file
, "\tmflr %s\n", reg_names
[12]);
15207 asm_fprintf (file
, "\t{l|lwz} %s,", reg_names
[0]);
15208 assemble_name (file
, buf
);
15209 asm_fprintf (file
, "@got(%s)\n", reg_names
[12]);
/* V.4, flag_pic > 1: compute the label address pc-relatively. */
15211 else if (flag_pic
> 1)
15213 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
15214 reg_names
[0], save_lr
, reg_names
[1]);
15215 /* Now, we need to get the address of the label. */
15216 fputs ("\tbl 1f\n\t.long ", file
);
15217 assemble_name (file
, buf
);
15218 fputs ("-.\n1:", file
);
15219 asm_fprintf (file
, "\tmflr %s\n", reg_names
[11]);
15220 asm_fprintf (file
, "\t{l|lwz} %s,0(%s)\n",
15221 reg_names
[0], reg_names
[11]);
15222 asm_fprintf (file
, "\t{cax|add} %s,%s,%s\n",
15223 reg_names
[0], reg_names
[0], reg_names
[11]);
/* V.4, non-PIC: load the label address with lis/la. */
15227 asm_fprintf (file
, "\t{liu|lis} %s,", reg_names
[12]);
15228 assemble_name (file
, buf
);
15229 fputs ("@ha\n", file
);
15230 asm_fprintf (file
, "\t{st|stw} %s,%d(%s)\n",
15231 reg_names
[0], save_lr
, reg_names
[1]);
15232 asm_fprintf (file
, "\t{cal|la} %s,", reg_names
[0]);
15233 assemble_name (file
, buf
);
15234 asm_fprintf (file
, "@l(%s)\n", reg_names
[12]);
15237 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
15238 fprintf (file
, "\tbl %s%s\n",
15239 RS6000_MCOUNT
, flag_pic
? "@plt" : "");
/* AIX ABIs: most of the work is in output_profile_hook unless
   TARGET_PROFILE_KERNEL is set. */
15244 if (!TARGET_PROFILE_KERNEL
)
15246 /* Don't do anything, done in output_profile_hook (). */
15253 asm_fprintf (file
, "\tmflr %s\n", reg_names
[0]);
15254 asm_fprintf (file
, "\tstd %s,16(%s)\n", reg_names
[0], reg_names
[1]);
/* Preserve the static chain register around the mcount call. */
15256 if (cfun
->static_chain_decl
!= NULL
)
15258 asm_fprintf (file
, "\tstd %s,24(%s)\n",
15259 reg_names
[STATIC_CHAIN_REGNUM
], reg_names
[1]);
15260 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
15261 asm_fprintf (file
, "\tld %s,24(%s)\n",
15262 reg_names
[STATIC_CHAIN_REGNUM
], reg_names
[1]);
15265 fprintf (file
, "\tbl %s\n", RS6000_MCOUNT
);
15272 /* Power4 load update and store update instructions are cracked into a
15273 load or store and an integer insn which are executed in the same cycle.
15274 Branches have their own dispatch slot which does not count against the
15275 GCC issue rate, but it changes the program flow so there are no other
15276 instructions to issue in this cycle. */
/* NOTE(review): garbled extraction -- tokens split across lines; the
   return statements for the USE/CLOBBER, microcoded and fallthrough
   cases are in elided lines.  TARGET_SCHED_VARIABLE_ISSUE hook: returns
   how many more insns can issue this cycle after INSN.  Code left
   byte-identical. */
15279 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED
,
15280 int verbose ATTRIBUTE_UNUSED
,
15281 rtx insn
, int more
)
15283 if (GET_CODE (PATTERN (insn
)) == USE
15284 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
15287 if (rs6000_sched_groups
)
15289 if (is_microcoded_insn (insn
))
/* A cracked insn occupies two issue slots. */
15291 else if (is_cracked_insn (insn
))
15292 return more
> 2 ? more
- 2 : 0;
15298 /* Adjust the cost of a scheduling dependency. Return the new cost of
15299 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
/* NOTE(review): garbled extraction -- tokens split across lines; the
   switch case labels (presumably TYPE_JMPREG and TYPE_BRANCH -- confirm
   against the full source), intermediate returns, and the final
   `return cost;` are in elided lines.  Code left byte-identical. */
15302 rs6000_adjust_cost (rtx insn
, rtx link
, rtx dep_insn
, int cost
)
15304 if (! recog_memoized (insn
))
15307 if (REG_NOTE_KIND (link
) != 0)
15310 if (REG_NOTE_KIND (link
) == 0)
15312 /* Data dependency; DEP_INSN writes a register that INSN reads
15313 some cycles later. */
15314 switch (get_attr_type (insn
))
15317 /* Tell the first scheduling pass about the latency between
15318 a mtctr and bctr (and mtlr and br/blr). The first
15319 scheduling pass will not know about this latency since
15320 the mtctr instruction, which has the latency associated
15321 to it, will be generated by reload. */
15322 return TARGET_POWER
? 5 : 4;
15324 /* Leave some extra cycles between a compare and its
15325 dependent branch, to inhibit expensive mispredicts. */
15326 if ((rs6000_cpu_attr
== CPU_PPC603
15327 || rs6000_cpu_attr
== CPU_PPC604
15328 || rs6000_cpu_attr
== CPU_PPC604E
15329 || rs6000_cpu_attr
== CPU_PPC620
15330 || rs6000_cpu_attr
== CPU_PPC630
15331 || rs6000_cpu_attr
== CPU_PPC750
15332 || rs6000_cpu_attr
== CPU_PPC7400
15333 || rs6000_cpu_attr
== CPU_PPC7450
15334 || rs6000_cpu_attr
== CPU_POWER4
15335 || rs6000_cpu_attr
== CPU_POWER5
)
15336 && recog_memoized (dep_insn
)
15337 && (INSN_CODE (dep_insn
) >= 0)
15338 && (get_attr_type (dep_insn
) == TYPE_CMP
15339 || get_attr_type (dep_insn
) == TYPE_COMPARE
15340 || get_attr_type (dep_insn
) == TYPE_DELAYED_COMPARE
15341 || get_attr_type (dep_insn
) == TYPE_IMUL_COMPARE
15342 || get_attr_type (dep_insn
) == TYPE_LMUL_COMPARE
15343 || get_attr_type (dep_insn
) == TYPE_FPCOMPARE
15344 || get_attr_type (dep_insn
) == TYPE_CR_LOGICAL
15345 || get_attr_type (dep_insn
) == TYPE_DELAYED_CR
))
15350 /* Fall out to return default cost. */
15356 /* The function returns true if INSN is microcoded.
15357 Return false otherwise. */
/* NOTE(review): garbled extraction -- the `return true;`/`return false;`
   lines and braces are elided.  On dispatch-group targets
   (rs6000_sched_groups), update-form loads/stores and mfcr are
   microcoded.  Code left byte-identical (comment grammar fixed). */
15360 is_microcoded_insn (rtx insn
)
15362 if (!insn
|| !INSN_P (insn
)
15363 || GET_CODE (PATTERN (insn
)) == USE
15364 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
15367 if (rs6000_sched_groups
)
15369 enum attr_type type
= get_attr_type (insn
);
15370 if (type
== TYPE_LOAD_EXT_U
15371 || type
== TYPE_LOAD_EXT_UX
15372 || type
== TYPE_LOAD_UX
15373 || type
== TYPE_STORE_UX
15374 || type
== TYPE_MFCR
)
15381 /* The function returns a nonzero value if INSN can be scheduled only
15382 as the first insn in a dispatch group ("dispatch-slot restricted").
15383 In this case, the returned value indicates how many dispatch slots
15384 the insn occupies (at the beginning of the group).
15385 Return 0 otherwise. */
/* NOTE(review): garbled extraction -- the switch statement header, most
   case labels, and the numeric return values are in elided lines; only
   the TYPE_DELAYED_CR/TYPE_CR_LOGICAL cases and a POWER5 cracked-insn
   check are visible.  Code left byte-identical. */
15388 is_dispatch_slot_restricted (rtx insn
)
15390 enum attr_type type
;
15392 if (!rs6000_sched_groups
)
15396 || insn
== NULL_RTX
15397 || GET_CODE (insn
) == NOTE
15398 || GET_CODE (PATTERN (insn
)) == USE
15399 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
15402 type
= get_attr_type (insn
);
15409 case TYPE_DELAYED_CR
:
15410 case TYPE_CR_LOGICAL
:
/* On POWER5 a cracked insn restricted to the first slot takes 2 slots. */
15418 if (rs6000_cpu
== PROCESSOR_POWER5
15419 && is_cracked_insn (insn
))
15425 /* The function returns true if INSN is cracked into 2 instructions
15426 by the processor (and therefore occupies 2 issue slots). */
/* NOTE(review): garbled extraction -- the `return true;`/`return false;`
   lines and braces are elided.  Code left byte-identical. */
15429 is_cracked_insn (rtx insn
)
15431 if (!insn
|| !INSN_P (insn
)
15432 || GET_CODE (PATTERN (insn
)) == USE
15433 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
15436 if (rs6000_sched_groups
)
15438 enum attr_type type
= get_attr_type (insn
);
15439 if (type
== TYPE_LOAD_U
|| type
== TYPE_STORE_U
15440 || type
== TYPE_FPLOAD_U
|| type
== TYPE_FPSTORE_U
15441 || type
== TYPE_FPLOAD_UX
|| type
== TYPE_FPSTORE_UX
15442 || type
== TYPE_LOAD_EXT
|| type
== TYPE_DELAYED_CR
15443 || type
== TYPE_COMPARE
|| type
== TYPE_DELAYED_COMPARE
15444 || type
== TYPE_IMUL_COMPARE
|| type
== TYPE_LMUL_COMPARE
15445 || type
== TYPE_IDIV
|| type
== TYPE_LDIV
15446 || type
== TYPE_INSERT_WORD
)
15453 /* The function returns true if INSN can be issued only from
15454 the branch slot. */
/* NOTE(review): garbled extraction -- the `return true;`/`return false;`
   lines and braces are elided.  Branch and jump-register insns are the
   branch-slot-only types.  Code left byte-identical. */
15457 is_branch_slot_insn (rtx insn
)
15459 if (!insn
|| !INSN_P (insn
)
15460 || GET_CODE (PATTERN (insn
)) == USE
15461 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
15464 if (rs6000_sched_groups
)
15466 enum attr_type type
= get_attr_type (insn
);
15467 if (type
== TYPE_BRANCH
|| type
== TYPE_JMPREG
)
15475 /* A C statement (sans semicolon) to update the integer scheduling
15476 priority INSN_PRIORITY (INSN). Increase the priority to execute the
15477 INSN earlier, reduce the priority to execute INSN later. Do not
15478 define this macro if you do not need to adjust the scheduling
15479 priorities of insns. */
/* NOTE(review): garbled extraction -- the default `return priority;`
   paths, the case labels of both switches, and the debug block's #if
   guard are in elided lines.  The fprintf(stderr,...) section appears
   to be debug-only code -- presumably inside an #if 0 or similar in the
   full source; confirm before relying on it.  Code left byte-identical. */
15482 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED
, int priority
)
15484 /* On machines (like the 750) which have asymmetric integer units,
15485 where one integer unit can do multiply and divides and the other
15486 can't, reduce the priority of multiply/divide so it is scheduled
15487 before other integer operations. */
15490 if (! INSN_P (insn
))
15493 if (GET_CODE (PATTERN (insn
)) == USE
)
15496 switch (rs6000_cpu_attr
) {
15498 switch (get_attr_type (insn
))
15505 fprintf (stderr
, "priority was %#x (%d) before adjustment\n",
15506 priority
, priority
);
15507 if (priority
>= 0 && priority
< 0x01000000)
15514 if (is_dispatch_slot_restricted (insn
)
15515 && reload_completed
15516 && current_sched_info
->sched_max_insns_priority
15517 && rs6000_sched_restricted_insns_priority
)
15520 /* Prioritize insns that can be dispatched only in the first dispatch slot. */
15521 if (rs6000_sched_restricted_insns_priority
== 1)
15522 /* Attach highest priority to insn. This means that in
15523 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
15524 precede 'priority' (critical path) considerations. */
15525 return current_sched_info
->sched_max_insns_priority
;
15526 else if (rs6000_sched_restricted_insns_priority
== 2)
15527 /* Increase priority of insn by a minimal amount. This means that in
15528 haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
15529 precede dispatch-slot restriction considerations. */
15530 return (priority
+ 1);
15536 /* Return how many instructions the machine can issue per cycle. */
/* NOTE(review): garbled extraction -- the remaining CPU case labels and
   their numeric return values are in elided lines; only the RIOS1 and
   PPC601 labels are visible.  Code left byte-identical. */
15539 rs6000_issue_rate (void)
15541 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
15542 if (!reload_completed
)
15545 switch (rs6000_cpu_attr
) {
15546 case CPU_RIOS1
: /* ? */
15548 case CPU_PPC601
: /* ? */
15571 /* Return how many instructions to look ahead for better insn
/* NOTE(review): garbled extraction -- the tail of the comment above, the
   function's return statements, and braces are in elided lines; only the
   PPC8540 special case is visible.  Code left byte-identical. */
15575 rs6000_use_sched_lookahead (void)
15577 if (rs6000_cpu_attr
== CPU_PPC8540
)
15582 /* Determine if PAT refers to memory. */
/* NOTE(review): garbled extraction -- the declarations of `fmt`, `i`,
   `j`, `ret`, the `return true;` for the MEM case, the 'e' format test,
   and the final `return ret;` are in elided lines.  Recursive walk over
   an RTX looking for any MEM subexpression.  Code left byte-identical
   (typo "Determine is" fixed in the comment above). */
15585 is_mem_ref (rtx pat
)
15591 if (GET_CODE (pat
) == MEM
)
15594 /* Recursively process the pattern. */
15595 fmt
= GET_RTX_FORMAT (GET_CODE (pat
));
15597 for (i
= GET_RTX_LENGTH (GET_CODE (pat
)) - 1; i
>= 0 && !ret
; i
--)
15600 ret
|= is_mem_ref (XEXP (pat
, i
));
15601 else if (fmt
[i
] == 'E')
15602 for (j
= XVECLEN (pat
, i
) - 1; j
>= 0; j
--)
15603 ret
|= is_mem_ref (XVECEXP (pat
, i
, j
));
15609 /* Determine if PAT is a PATTERN of a load insn. */
/* NOTE(review): garbled extraction -- the return statements, the loop
   index declaration, and braces are in elided lines.  A load pattern is
   a SET whose source references memory, or a PARALLEL containing one.
   Code left byte-identical. */
15612 is_load_insn1 (rtx pat
)
15614 if (!pat
|| pat
== NULL_RTX
)
15617 if (GET_CODE (pat
) == SET
)
15618 return is_mem_ref (SET_SRC (pat
));
15620 if (GET_CODE (pat
) == PARALLEL
)
15624 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
15625 if (is_load_insn1 (XVECEXP (pat
, 0, i
)))
15632 /* Determine if INSN loads from memory. */
/* NOTE(review): garbled extraction -- the `return false;` lines for the
   guard conditions are elided.  CALL_INSNs are excluded before checking
   the pattern.  Code left byte-identical. */
15635 is_load_insn (rtx insn
)
15637 if (!insn
|| !INSN_P (insn
))
15640 if (GET_CODE (insn
) == CALL_INSN
)
15643 return is_load_insn1 (PATTERN (insn
));
15646 /* Determine if PAT is a PATTERN of a store insn. */
/* NOTE(review): garbled extraction -- mirror of is_load_insn1 but
   testing SET_DEST; return statements, the loop index declaration, and
   braces are in elided lines.  Code left byte-identical. */
15649 is_store_insn1 (rtx pat
)
15651 if (!pat
|| pat
== NULL_RTX
)
15654 if (GET_CODE (pat
) == SET
)
15655 return is_mem_ref (SET_DEST (pat
));
15657 if (GET_CODE (pat
) == PARALLEL
)
15661 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
15662 if (is_store_insn1 (XVECEXP (pat
, 0, i
)))
15669 /* Determine if INSN stores to memory. */
/* NOTE(review): garbled extraction -- the guard's `return false;` is in
   an elided line.  Code left byte-identical. */
15672 is_store_insn (rtx insn
)
15674 if (!insn
|| !INSN_P (insn
))
15677 return is_store_insn1 (PATTERN (insn
));
15680 /* Returns whether the dependence between INSN and NEXT is considered
15681 costly by the given target. */
/* NOTE(review): garbled extraction -- the `return false;`/`return true;`
   lines for each policy branch are elided.  Implements the
   -msched-costly-dep policy ladder: none, all, store-to-load, true
   store-to-load, and a numeric latency threshold.  Code left
   byte-identical (typo "enbled" fixed in the comment below). */
15684 rs6000_is_costly_dependence (rtx insn
, rtx next
, rtx link
, int cost
, int distance
)
15686 /* If the flag is not enabled - no dependence is considered costly;
15687 allow all dependent insns in the same group.
15688 This is the most aggressive option. */
15689 if (rs6000_sched_costly_dep
== no_dep_costly
)
15692 /* If the flag is set to 1 - a dependence is always considered costly;
15693 do not allow dependent instructions in the same group.
15694 This is the most conservative option. */
15695 if (rs6000_sched_costly_dep
== all_deps_costly
)
15698 if (rs6000_sched_costly_dep
== store_to_load_dep_costly
15699 && is_load_insn (next
)
15700 && is_store_insn (insn
))
15701 /* Prevent load after store in the same group. */
15704 if (rs6000_sched_costly_dep
== true_store_to_load_dep_costly
15705 && is_load_insn (next
)
15706 && is_store_insn (insn
)
15707 && (!link
|| (int) REG_NOTE_KIND (link
) == 0))
15708 /* Prevent load after store in the same group if it is a true dependence. */
15711 /* The flag is set to X; dependences with latency >= X are considered costly,
15712 and will not be scheduled in the same group. */
15713 if (rs6000_sched_costly_dep
<= max_dep_latency
15714 && ((cost
- distance
) >= (int)rs6000_sched_costly_dep
))
15720 /* Return the next insn after INSN that is found before TAIL is reached,
15721 skipping any "non-active" insns - insns that will not actually occupy
15722 an issue slot. Return NULL_RTX if such an insn is not found. */
/* NOTE(review): garbled extraction -- the declaration of `next_insn`,
   the `while (next_insn` loop header's first condition, the NULL_RTX
   returns, and the final return are in elided lines.  Skips NOTEs and
   USE/CLOBBER patterns.  Code left byte-identical. */
15725 get_next_active_insn (rtx insn
, rtx tail
)
15729 if (!insn
|| insn
== tail
)
15732 next_insn
= NEXT_INSN (insn
);
15735 && next_insn
!= tail
15736 && (GET_CODE(next_insn
) == NOTE
15737 || GET_CODE (PATTERN (next_insn
)) == USE
15738 || GET_CODE (PATTERN (next_insn
)) == CLOBBER
))
15740 next_insn
= NEXT_INSN (next_insn
);
15743 if (!next_insn
|| next_insn
== tail
)
15749 /* Return whether the presence of INSN causes a dispatch group termination
15750 of group WHICH_GROUP.
15752 If WHICH_GROUP == current_group, this function will return true if INSN
15753 causes the termination of the current group (i.e, the dispatch group to
15754 which INSN belongs). This means that INSN will be the last insn in the
15755 group it belongs to.
15757 If WHICH_GROUP == previous_group, this function will return true if INSN
15758 causes the termination of the previous group (i.e, the dispatch group that
15759 precedes the group to which INSN belongs). This means that INSN will be
15760 the first insn in the group it belongs to). */
/* NOTE(review): garbled extraction -- the INSN_P guard, the return
   statements, and braces are in elided lines.  Code left byte-identical. */
15763 insn_terminates_group_p (rtx insn
, enum group_termination which_group
)
15765 enum attr_type type
;
15770 type
= get_attr_type (insn
);
15772 if (is_microcoded_insn (insn
))
15775 if (which_group
== current_group
)
15777 if (is_branch_slot_insn (insn
))
15781 else if (which_group
== previous_group
)
15783 if (is_dispatch_slot_restricted (insn
))
15791 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
15792 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
/* NOTE(review): garbled extraction -- the declarations of `i`, `link`,
   `cost`, the NULL-insn skip inside the loop, the `return true;` on a
   costly dependence, and the final `return false;` are in elided lines.
   Walks each group member's INSN_DEPEND list looking for a costly
   dependence on NEXT_INSN.  Code left byte-identical. */
15795 is_costly_group (rtx
*group_insns
, rtx next_insn
)
15800 int issue_rate
= rs6000_issue_rate ();
15802 for (i
= 0; i
< issue_rate
; i
++)
15804 rtx insn
= group_insns
[i
];
15807 for (link
= INSN_DEPEND (insn
); link
!= 0; link
= XEXP (link
, 1))
15809 rtx next
= XEXP (link
, 0);
15810 if (next
== next_insn
)
15812 cost
= insn_cost (insn
, link
, next_insn
);
15813 if (rs6000_is_costly_dependence (insn
, next_insn
, link
, cost
, 0))
15822 /* Utility of the function redefine_groups.
15823 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
15824 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
15825 to keep it "far" (in a separate group) from GROUP_INSNS, following
15826 one of the following schemes, depending on the value of the flag
15827 -minsert_sched_nops = X:
15828 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
15829 in order to force NEXT_INSN into a separate group.
15830 (2) X < sched_finish_regroup_exact: insert exactly X nops.
15831 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
15832 insertion (has a group just ended, how many vacant issue slots remain in the
15833 last group, and how many dispatch groups were encountered so far). */
/* NOTE(review): garbled extraction -- tokens split across lines; the
   declarations of `nop`, `i`, `force`, the nop-generation calls
   (gen_nop/gen_group_ending_nop -- confirm against full source), the
   `if (!force)` early return's condition, group_count increments, and
   several braces are in elided lines.  Code left byte-identical. */
15836 force_new_group (int sched_verbose
, FILE *dump
, rtx
*group_insns
, rtx next_insn
,
15837 bool *group_end
, int can_issue_more
, int *group_count
)
15841 int issue_rate
= rs6000_issue_rate ();
15842 bool end
= *group_end
;
15845 if (next_insn
== NULL_RTX
)
15846 return can_issue_more
;
15848 if (rs6000_sched_insert_nops
> sched_finish_regroup_exact
)
15849 return can_issue_more
;
15851 force
= is_costly_group (group_insns
, next_insn
);
15853 return can_issue_more
;
15855 if (sched_verbose
> 6)
15856 fprintf (dump
,"force: group count = %d, can_issue_more = %d\n",
15857 *group_count
,can_issue_more
);
/* Scheme (1): insert exactly enough nops to start a fresh group. */
15859 if (rs6000_sched_insert_nops
== sched_finish_regroup_exact
)
15862 can_issue_more
= 0;
15864 /* Since only a branch can be issued in the last issue_slot, it is
15865 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
15866 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
15867 in this case the last nop will start a new group and the branch will be
15868 forced to the new group. */
15869 if (can_issue_more
&& !is_branch_slot_insn (next_insn
))
15872 while (can_issue_more
> 0)
15875 emit_insn_before (nop
, next_insn
);
/* Scheme (2): insert a fixed number (rs6000_sched_insert_nops) of nops. */
15883 if (rs6000_sched_insert_nops
< sched_finish_regroup_exact
)
15885 int n_nops
= rs6000_sched_insert_nops
;
15887 /* Nops can't be issued from the branch slot, so the effective
15888 issue_rate for nops is 'issue_rate - 1'. */
15889 if (can_issue_more
== 0)
15890 can_issue_more
= issue_rate
;
15892 if (can_issue_more
== 0)
15894 can_issue_more
= issue_rate
- 1;
15897 for (i
= 0; i
< issue_rate
; i
++)
15899 group_insns
[i
] = 0;
15906 emit_insn_before (nop
, next_insn
);
15907 if (can_issue_more
== issue_rate
- 1) /* new group begins */
15910 if (can_issue_more
== 0)
15912 can_issue_more
= issue_rate
- 1;
15915 for (i
= 0; i
< issue_rate
; i
++)
15917 group_insns
[i
] = 0;
15923 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
15926 *group_end
= /* Is next_insn going to start a new group? */
15928 || (can_issue_more
== 1 && !is_branch_slot_insn (next_insn
))
15929 || (can_issue_more
<= 2 && is_cracked_insn (next_insn
))
15930 || (can_issue_more
< issue_rate
&&
15931 insn_terminates_group_p (next_insn
, previous_group
)));
15932 if (*group_end
&& end
)
15935 if (sched_verbose
> 6)
15936 fprintf (dump
, "done force: group count = %d, can_issue_more = %d\n",
15937 *group_count
, can_issue_more
);
15938 return can_issue_more
;
15941 return can_issue_more
;
15944 /* This function tries to synch the dispatch groups that the compiler "sees"
15945 with the dispatch groups that the processor dispatcher is expected to
15946 form in practice. It tries to achieve this synchronization by forcing the
15947 estimated processor grouping on the compiler (as opposed to the function
15948 'pad_groups' which tries to force the scheduler's grouping on the processor).
15950 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
15951 examines the (estimated) dispatch groups that will be formed by the processor
15952 dispatcher. It marks these group boundaries to reflect the estimated
15953 processor grouping, overriding the grouping that the scheduler had marked.
15954 Depending on the value of the flag '-minsert-sched-nops' this function can
15955 force certain insns into separate groups or force a certain distance between
15956 them by inserting nops, for example, if there exists a "costly dependence"
15959 The function estimates the group boundaries that the processor will form as
15960 follows: It keeps track of how many vacant issue slots are available after
15961 each insn. A subsequent insn will start a new group if one of the following
15963 - no more vacant issue slots remain in the current dispatch group.
15964 - only the last issue slot, which is the branch slot, is vacant, but the next
15965 insn is not a branch.
15966 - only the last 2 or less issue slots, including the branch slot, are vacant,
15967 which means that a cracked insn (which occupies two issue slots) can't be
15968 issued in this group.
15969 - less than 'issue_rate' slots are vacant, and the next insn always needs to
15970 start a new group. */
/* NOTE(review): garbled extraction -- tokens split across lines; the
   declarations of `issue_rate`, `group_insns`, `i`, `slot`, `group_end`,
   the group_count increment, and several braces are in elided lines.
   Typos "pad_goups"/"folllows" fixed in the comment above.  Code left
   byte-identical. */
15973 redefine_groups (FILE *dump
, int sched_verbose
, rtx prev_head_insn
, rtx tail
)
15975 rtx insn
, next_insn
;
15977 int can_issue_more
;
15980 int group_count
= 0;
/* Initialization: stack-allocate the current-group insn array. */
15984 issue_rate
= rs6000_issue_rate ();
15985 group_insns
= alloca (issue_rate
* sizeof (rtx
));
15986 for (i
= 0; i
< issue_rate
; i
++)
15988 group_insns
[i
] = 0;
15990 can_issue_more
= issue_rate
;
15992 insn
= get_next_active_insn (prev_head_insn
, tail
);
15995 while (insn
!= NULL_RTX
)
15997 slot
= (issue_rate
- can_issue_more
);
15998 group_insns
[slot
] = insn
;
16000 rs6000_variable_issue (dump
, sched_verbose
, insn
, can_issue_more
);
16001 if (insn_terminates_group_p (insn
, current_group
))
16002 can_issue_more
= 0;
16004 next_insn
= get_next_active_insn (insn
, tail
);
16005 if (next_insn
== NULL_RTX
)
16006 return group_count
+ 1;
/* Estimate whether the processor will start a new group at next_insn
   (same four conditions as listed in the header comment). */
16008 group_end
= /* Is next_insn going to start a new group? */
16009 (can_issue_more
== 0
16010 || (can_issue_more
== 1 && !is_branch_slot_insn (next_insn
))
16011 || (can_issue_more
<= 2 && is_cracked_insn (next_insn
))
16012 || (can_issue_more
< issue_rate
&&
16013 insn_terminates_group_p (next_insn
, previous_group
)));
16015 can_issue_more
= force_new_group (sched_verbose
, dump
, group_insns
,
16016 next_insn
, &group_end
, can_issue_more
, &group_count
);
16021 can_issue_more
= 0;
16022 for (i
= 0; i
< issue_rate
; i
++)
16024 group_insns
[i
] = 0;
/* TImode marks a group boundary in the scheduler's bookkeeping;
   rewrite the mode to match the estimated processor grouping. */
16028 if (GET_MODE (next_insn
) == TImode
&& can_issue_more
)
16029 PUT_MODE(next_insn
, VOIDmode
);
16030 else if (!can_issue_more
&& GET_MODE (next_insn
) != TImode
)
16031 PUT_MODE (next_insn
, TImode
);
16034 if (can_issue_more
== 0)
16035 can_issue_more
= issue_rate
;
16038 return group_count
;
16041 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
16042 dispatch group boundaries that the scheduler had marked. Pad with nops
16043 any dispatch groups which have vacant issue slots, in order to force the
16044 scheduler's grouping on the processor dispatcher. The function
16045 returns the number of dispatch groups found. */
/* NOTE(review): garbled extraction -- tokens split across lines; the
   declarations of `issue_rate`, `group_end`, `nop`, the group_count
   increment, the nop generation, and several braces are in elided
   lines.  Typo "next_indn" fixed in the comment below.  Code left
   byte-identical. */
16048 pad_groups (FILE *dump
, int sched_verbose
, rtx prev_head_insn
, rtx tail
)
16050 rtx insn
, next_insn
;
16053 int can_issue_more
;
16055 int group_count
= 0;
16057 /* Initialize issue_rate. */
16058 issue_rate
= rs6000_issue_rate ();
16059 can_issue_more
= issue_rate
;
16061 insn
= get_next_active_insn (prev_head_insn
, tail
);
16062 next_insn
= get_next_active_insn (insn
, tail
);
16064 while (insn
!= NULL_RTX
)
16067 rs6000_variable_issue (dump
, sched_verbose
, insn
, can_issue_more
);
/* TImode on next_insn is the scheduler's mark for a group boundary. */
16069 group_end
= (next_insn
== NULL_RTX
|| GET_MODE (next_insn
) == TImode
);
16071 if (next_insn
== NULL_RTX
)
16076 /* If the scheduler had marked group termination at this location
16077 (between insn and next_insn), and neither insn nor next_insn will
16078 force group termination, pad the group with nops to force group
16081 && (rs6000_sched_insert_nops
== sched_finish_pad_groups
)
16082 && !insn_terminates_group_p (insn
, current_group
)
16083 && !insn_terminates_group_p (next_insn
, previous_group
))
16085 if (!is_branch_slot_insn(next_insn
))
16088 while (can_issue_more
)
16091 emit_insn_before (nop
, next_insn
);
16096 can_issue_more
= issue_rate
;
16101 next_insn
= get_next_active_insn (insn
, tail
);
16104 return group_count
;
16107 /* The following function is called at the end of scheduling BB.
16108 After reload, it inserts nops at insn group bundling. */
/* NOTE(review): garbled extraction -- the declaration of `n_groups`,
   the early-return for sched_finish_none, and braces are in elided
   lines.  TARGET_SCHED_FINISH hook: after reload on dispatch-group
   targets, either pads the scheduler's groups or re-estimates the
   processor's grouping.  Code left byte-identical. */
16111 rs6000_sched_finish (FILE *dump
, int sched_verbose
)
16116 fprintf (dump
, "=== Finishing schedule.\n");
16118 if (reload_completed
&& rs6000_sched_groups
)
16120 if (rs6000_sched_insert_nops
== sched_finish_none
)
16123 if (rs6000_sched_insert_nops
== sched_finish_pad_groups
)
16124 n_groups
= pad_groups (dump
, sched_verbose
,
16125 current_sched_info
->prev_head
,
16126 current_sched_info
->next_tail
);
16128 n_groups
= redefine_groups (dump
, sched_verbose
,
16129 current_sched_info
->prev_head
,
16130 current_sched_info
->next_tail
);
16132 if (sched_verbose
>= 6)
16134 fprintf (dump
, "ngroups = %d\n", n_groups
);
16135 print_rtl (dump
, current_sched_info
->prev_head
);
16136 fprintf (dump
, "Done finish_sched\n");
16141 /* Length in units of the trampoline for entering a nested function. */
/* NOTE(review): garbled extraction -- the `ret` declaration, the ABI
   case labels (presumably AIX vs. V4/Darwin -- confirm against full
   source), the default abort, and `return ret;` are in elided lines.
   Code left byte-identical. */
16144 rs6000_trampoline_size (void)
16148 switch (DEFAULT_ABI
)
16154 ret
= (TARGET_32BIT
) ? 12 : 24;
16159 ret
= (TARGET_32BIT
) ? 40 : 48;
16166 /* Emit RTL insns to initialize the variable parts of a trampoline.
16167 FNADDR is an RTX for the address of the function's pure code.
16168 CXT is an RTX for the static chain value for the function. */
/* NOTE(review): garbled extraction -- the ABI case labels, `break`s, the
   remaining emit_library_call arguments, and the #undef of the helper
   macros are in elided lines.  Code left byte-identical. */
16171 rs6000_initialize_trampoline (rtx addr
, rtx fnaddr
, rtx cxt
)
16173 enum machine_mode pmode
= Pmode
;
16174 int regsize
= (TARGET_32BIT
) ? 4 : 8;
16175 rtx ctx_reg
= force_reg (pmode
, cxt
);
16177 switch (DEFAULT_ABI
)
16182 /* Macros to shorten the code expansions below. */
16183 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
16184 #define MEM_PLUS(addr,offset) \
16185 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
16187 /* Under AIX, just build the 3 word function descriptor */
16190 rtx fn_reg
= gen_reg_rtx (pmode
);
16191 rtx toc_reg
= gen_reg_rtx (pmode
);
/* Copy { entry point, TOC } from FNADDR's descriptor and store the
   static chain as the third word. */
16192 emit_move_insn (fn_reg
, MEM_DEREF (fnaddr
));
16193 emit_move_insn (toc_reg
, MEM_PLUS (fnaddr
, regsize
));
16194 emit_move_insn (MEM_DEREF (addr
), fn_reg
);
16195 emit_move_insn (MEM_PLUS (addr
, regsize
), toc_reg
);
16196 emit_move_insn (MEM_PLUS (addr
, 2*regsize
), ctx_reg
);
16200 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
16203 emit_library_call (gen_rtx_SYMBOL_REF (SImode
, "__trampoline_setup"),
16204 FALSE
, VOIDmode
, 4,
16206 GEN_INT (rs6000_trampoline_size ()), SImode
,
16216 /* Table of valid machine attributes. */
/* NOTE(review): garbled extraction -- entries are split across lines;
   a "ms_struct"/other trailing entries may exist in elided lines.
   NULL-terminated table consumed via TARGET_ATTRIBUTE_TABLE.
   Code left byte-identical. */
16218 const struct attribute_spec rs6000_attribute_table
[] =
16220 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
16221 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute
},
16222 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
16223 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute
},
16224 { NULL
, 0, 0, false, false, false, NULL
}
16227 /* Handle the "altivec" attribute. The attribute may have
16228 arguments as follows:
16230 __attribute__((altivec(vector__)))
16231 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
16232 __attribute__((altivec(bool__))) (always followed by 'unsigned')
16234 and may appear more than once (e.g., 'vector bool char') in a
16235 given declaration. */
/* NOTE(review): garbled extraction -- tokens split across lines; the
   declarations of `altivec_type` and `unsigned_p`, the 'v'/'b'/'p'
   case labels of the outer switch, inner switch headers, default
   branches, and braces are in elided lines.  Code left byte-identical. */
16238 rs6000_handle_altivec_attribute (tree
*node
, tree name
, tree args
,
16239 int flags ATTRIBUTE_UNUSED
,
16240 bool *no_add_attrs
)
16242 tree type
= *node
, result
= NULL_TREE
;
16243 enum machine_mode mode
;
/* Pick out the single-character selector ('v', 'b' or 'p') from the
   attribute's identifier argument. */
16246 = ((args
&& TREE_CODE (args
) == TREE_LIST
&& TREE_VALUE (args
)
16247 && TREE_CODE (TREE_VALUE (args
)) == IDENTIFIER_NODE
)
16248 ? *IDENTIFIER_POINTER (TREE_VALUE (args
))
/* Strip pointers/functions/arrays down to the element type. */
16251 while (POINTER_TYPE_P (type
)
16252 || TREE_CODE (type
) == FUNCTION_TYPE
16253 || TREE_CODE (type
) == METHOD_TYPE
16254 || TREE_CODE (type
) == ARRAY_TYPE
)
16255 type
= TREE_TYPE (type
);
16257 mode
= TYPE_MODE (type
);
16259 if (rs6000_warn_altivec_long
16260 && (type
== long_unsigned_type_node
|| type
== long_integer_type_node
))
16261 warning ("use of 'long' in AltiVec types is deprecated; use 'int'");
16263 switch (altivec_type
)
/* 'v' (vector__): map scalar mode to the vector type of same element. */
16266 unsigned_p
= TYPE_UNSIGNED (type
);
16270 result
= (unsigned_p
? unsigned_V4SI_type_node
: V4SI_type_node
);
16273 result
= (unsigned_p
? unsigned_V8HI_type_node
: V8HI_type_node
);
16276 result
= (unsigned_p
? unsigned_V16QI_type_node
: V16QI_type_node
);
16278 case SFmode
: result
= V4SF_type_node
; break;
16279 /* If the user says 'vector int bool', we may be handed the 'bool'
16280 attribute _before_ the 'vector' attribute, and so select the proper
16281 type in the 'b' case below. */
16282 case V4SImode
: case V8HImode
: case V16QImode
: case V4SFmode
: result
= type
;
/* 'b' (bool__): map to the corresponding AltiVec bool vector type. */
16289 case SImode
: case V4SImode
: result
= bool_V4SI_type_node
; break;
16290 case HImode
: case V8HImode
: result
= bool_V8HI_type_node
; break;
16291 case QImode
: case V16QImode
: result
= bool_V16QI_type_node
;
/* 'p' (pixel__): only valid on an 8x16 element. */
16298 case V8HImode
: result
= pixel_V8HI_type_node
;
/* Propagate constness onto the replacement vector type. */
16304 if (result
&& result
!= type
&& TYPE_READONLY (type
))
16305 result
= build_qualified_type (result
, TYPE_QUAL_CONST
);
16307 *no_add_attrs
= true; /* No need to hang on to the attribute. */
16310 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name
));
16312 *node
= reconstruct_complex_type (*node
, result
);
16317 /* AltiVec defines four built-in scalar types that serve as vector
16318 elements; we must teach the compiler how to mangle them. */
/* NOTE(review): garbled extraction -- the final `return NULL;` (fall
   back to default C++ mangling) is in an elided line.  Returns the
   fixed Itanium-ABI vendor-extended mangled names for the AltiVec
   bool/pixel scalar types.  Code left byte-identical. */
16320 static const char *
16321 rs6000_mangle_fundamental_type (tree type
)
16323 if (type
== bool_char_type_node
) return "U6__boolc";
16324 if (type
== bool_short_type_node
) return "U6__bools";
16325 if (type
== pixel_type_node
) return "u7__pixel";
16326 if (type
== bool_int_type_node
) return "U6__booli";
16328 /* For all other types, use normal C++ mangling. */
16332 /* Handle a "longcall" or "shortcall" attribute; arguments as in
16333 struct attribute_spec.handler. */
/* NOTE(review): garbled extraction -- braces and the `return NULL_TREE;`
   are in elided lines.  Rejects the attribute (with a warning) on
   anything that is not a function type or declaration.  Code left
   byte-identical. */
16336 rs6000_handle_longcall_attribute (tree
*node
, tree name
,
16337 tree args ATTRIBUTE_UNUSED
,
16338 int flags ATTRIBUTE_UNUSED
,
16339 bool *no_add_attrs
)
16341 if (TREE_CODE (*node
) != FUNCTION_TYPE
16342 && TREE_CODE (*node
) != FIELD_DECL
16343 && TREE_CODE (*node
) != TYPE_DECL
)
16345 warning ("`%s' attribute only applies to functions",
16346 IDENTIFIER_POINTER (name
));
16347 *no_add_attrs
= true;
16353 /* Set longcall attributes on all functions declared when
16354 rs6000_default_long_calls is true. */
/* NOTE(review): garbled extraction -- the middle argument of tree_cons
   (presumably NULL_TREE -- confirm against full source) is in an elided
   line.  Code left byte-identical. */
16356 rs6000_set_default_type_attributes (tree type
)
16358 if (rs6000_default_long_calls
16359 && (TREE_CODE (type
) == FUNCTION_TYPE
16360 || TREE_CODE (type
) == METHOD_TYPE
))
16361 TYPE_ATTRIBUTES (type
) = tree_cons (get_identifier ("longcall"),
16363 TYPE_ATTRIBUTES (type
));
16366 /* Return a reference suitable for calling a function with the
16367 longcall attribute. */
/* NOTE(review): garbled extraction -- the `tree node` declaration, the
   early `return call_ref;` for non-SYMBOL_REFs, the `call_name++` in
   the dot-skipping loop, and braces are in elided lines.  Forces the
   (possibly dot-stripped) symbol into a register so the call goes
   through CTR/LR.  Code left byte-identical. */
16370 rs6000_longcall_ref (rtx call_ref
)
16372 const char *call_name
;
16375 if (GET_CODE (call_ref
) != SYMBOL_REF
)
16378 /* System V adds '.' to the internal name, so skip them. */
16379 call_name
= XSTR (call_ref
, 0);
16380 if (*call_name
== '.')
16382 while (*call_name
== '.')
16385 node
= get_identifier (call_name
);
16386 call_ref
= gen_rtx_SYMBOL_REF (VOIDmode
, IDENTIFIER_POINTER (node
));
16389 return force_reg (Pmode
, call_ref
);
16392 #ifdef USING_ELFOS_H
16394 /* A C statement or statements to switch to the appropriate section
16395 for output of RTX in mode MODE. You can assume that RTX is some
16396 kind of constant in RTL. The argument MODE is redundant except in
16397 the case of a `const_int' rtx. Select the section by calling
16398 `text_section' or one of the alternatives for other sections.
16400 Do not define this macro if you put all constants in the read-only
16404 rs6000_elf_select_rtx_section (enum machine_mode mode
, rtx x
,
16405 unsigned HOST_WIDE_INT align
)
16407 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
16410 default_elf_select_rtx_section (mode
, x
, align
);
16413 /* A C statement or statements to switch to the appropriate
16414 section for output of DECL. DECL is either a `VAR_DECL' node
16415 or a constant of some sort. RELOC indicates whether forming
16416 the initial value of DECL requires link-time relocations. */
16419 rs6000_elf_select_section (tree decl
, int reloc
,
16420 unsigned HOST_WIDE_INT align
)
16422 /* Pretend that we're always building for a shared library when
16423 ABI_AIX, because otherwise we end up with dynamic relocations
16424 in read-only sections. This happens for function pointers,
16425 references to vtables in typeinfo, and probably other cases. */
16426 default_elf_select_section_1 (decl
, reloc
, align
,
16427 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
16430 /* A C statement to build up a unique section name, expressed as a
16431 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
16432 RELOC indicates whether the initial value of EXP requires
16433 link-time relocations. If you do not define this macro, GCC will use
16434 the symbol name prefixed by `.' as the section name. Note - this
16435 macro can now be called for uninitialized data items as well as
16436 initialized data and functions. */
16439 rs6000_elf_unique_section (tree decl
, int reloc
)
16441 /* As above, pretend that we're always building for a shared library
16442 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
16443 default_unique_section_1 (decl
, reloc
,
16444 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
16447 /* For a SYMBOL_REF, set generic flags and then perform some
16448 target-specific processing.
16450 When the AIX ABI is requested on a non-AIX system, replace the
16451 function name with the real name (with a leading .) rather than the
16452 function descriptor name. This saves a lot of overriding code to
16453 read the prefixes. */
16456 rs6000_elf_encode_section_info (tree decl
, rtx rtl
, int first
)
16458 default_encode_section_info (decl
, rtl
, first
);
16461 && TREE_CODE (decl
) == FUNCTION_DECL
16463 && DEFAULT_ABI
== ABI_AIX
)
16465 rtx sym_ref
= XEXP (rtl
, 0);
16466 size_t len
= strlen (XSTR (sym_ref
, 0));
16467 char *str
= alloca (len
+ 2);
16469 memcpy (str
+ 1, XSTR (sym_ref
, 0), len
+ 1);
16470 XSTR (sym_ref
, 0) = ggc_alloc_string (str
, len
+ 1);
16475 rs6000_elf_in_small_data_p (tree decl
)
16477 if (rs6000_sdata
== SDATA_NONE
)
16480 if (TREE_CODE (decl
) == VAR_DECL
&& DECL_SECTION_NAME (decl
))
16482 const char *section
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
16483 if (strcmp (section
, ".sdata") == 0
16484 || strcmp (section
, ".sdata2") == 0
16485 || strcmp (section
, ".sbss") == 0
16486 || strcmp (section
, ".sbss2") == 0
16487 || strcmp (section
, ".PPC.EMB.sdata0") == 0
16488 || strcmp (section
, ".PPC.EMB.sbss0") == 0)
16493 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (decl
));
16496 && (unsigned HOST_WIDE_INT
) size
<= g_switch_value
16497 /* If it's not public, and we're not going to reference it there,
16498 there's no need to put it in the small data section. */
16499 && (rs6000_sdata
!= SDATA_DATA
|| TREE_PUBLIC (decl
)))
16506 #endif /* USING_ELFOS_H */
16509 /* Return a REG that occurs in ADDR with coefficient 1.
16510 ADDR can be effectively incremented by incrementing REG.
16512 r0 is special and we must not select it as an address
16513 register by this routine since our caller will try to
16514 increment the returned register via an "la" instruction. */
16517 find_addr_reg (rtx addr
)
16519 while (GET_CODE (addr
) == PLUS
)
16521 if (GET_CODE (XEXP (addr
, 0)) == REG
16522 && REGNO (XEXP (addr
, 0)) != 0)
16523 addr
= XEXP (addr
, 0);
16524 else if (GET_CODE (XEXP (addr
, 1)) == REG
16525 && REGNO (XEXP (addr
, 1)) != 0)
16526 addr
= XEXP (addr
, 1);
16527 else if (CONSTANT_P (XEXP (addr
, 0)))
16528 addr
= XEXP (addr
, 1);
16529 else if (CONSTANT_P (XEXP (addr
, 1)))
16530 addr
= XEXP (addr
, 0);
16534 if (GET_CODE (addr
) == REG
&& REGNO (addr
) != 0)
16540 rs6000_fatal_bad_address (rtx op
)
16542 fatal_insn ("bad address", op
);
16548 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
16549 reference and a constant. */
16552 symbolic_operand (rtx op
)
16554 switch (GET_CODE (op
))
16561 return (GET_CODE (op
) == SYMBOL_REF
||
16562 (GET_CODE (XEXP (op
, 0)) == SYMBOL_REF
16563 || GET_CODE (XEXP (op
, 0)) == LABEL_REF
)
16564 && GET_CODE (XEXP (op
, 1)) == CONST_INT
);
16573 static tree branch_island_list
= 0;
16575 /* Remember to generate a branch island for far calls to the given
16579 add_compiler_branch_island (tree label_name
, tree function_name
, int line_number
)
16581 tree branch_island
= build_tree_list (function_name
, label_name
);
16582 TREE_TYPE (branch_island
) = build_int_cst (NULL_TREE
, line_number
);
16583 TREE_CHAIN (branch_island
) = branch_island_list
;
16584 branch_island_list
= branch_island
;
16587 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
16588 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
16589 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
16590 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
16592 /* Generate far-jump branch islands for everything on the
16593 branch_island_list. Invoked immediately after the last instruction
16594 of the epilogue has been emitted; the branch-islands must be
16595 appended to, and contiguous with, the function body. Mach-O stubs
16596 are generated in machopic_output_stub(). */
16599 macho_branch_islands (void)
16602 tree branch_island
;
16604 for (branch_island
= branch_island_list
;
16606 branch_island
= TREE_CHAIN (branch_island
))
16608 const char *label
=
16609 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island
));
16611 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island
));
16612 char name_buf
[512];
16613 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
16614 if (name
[0] == '*' || name
[0] == '&')
16615 strcpy (name_buf
, name
+1);
16619 strcpy (name_buf
+1, name
);
16621 strcpy (tmp_buf
, "\n");
16622 strcat (tmp_buf
, label
);
16623 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
16624 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
16625 fprintf (asm_out_file
, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED
"\n",
16626 BRANCH_ISLAND_LINE_NUMBER(branch_island
));
16627 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
16630 strcat (tmp_buf
, ":\n\tmflr r0\n\tbcl 20,31,");
16631 strcat (tmp_buf
, label
);
16632 strcat (tmp_buf
, "_pic\n");
16633 strcat (tmp_buf
, label
);
16634 strcat (tmp_buf
, "_pic:\n\tmflr r11\n");
16636 strcat (tmp_buf
, "\taddis r11,r11,ha16(");
16637 strcat (tmp_buf
, name_buf
);
16638 strcat (tmp_buf
, " - ");
16639 strcat (tmp_buf
, label
);
16640 strcat (tmp_buf
, "_pic)\n");
16642 strcat (tmp_buf
, "\tmtlr r0\n");
16644 strcat (tmp_buf
, "\taddi r12,r11,lo16(");
16645 strcat (tmp_buf
, name_buf
);
16646 strcat (tmp_buf
, " - ");
16647 strcat (tmp_buf
, label
);
16648 strcat (tmp_buf
, "_pic)\n");
16650 strcat (tmp_buf
, "\tmtctr r12\n\tbctr\n");
16654 strcat (tmp_buf
, ":\nlis r12,hi16(");
16655 strcat (tmp_buf
, name_buf
);
16656 strcat (tmp_buf
, ")\n\tori r12,r12,lo16(");
16657 strcat (tmp_buf
, name_buf
);
16658 strcat (tmp_buf
, ")\n\tmtctr r12\n\tbctr");
16660 output_asm_insn (tmp_buf
, 0);
16661 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
16662 if (write_symbols
== DBX_DEBUG
|| write_symbols
== XCOFF_DEBUG
)
16663 fprintf(asm_out_file
, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED
"\n",
16664 BRANCH_ISLAND_LINE_NUMBER (branch_island
));
16665 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
16668 branch_island_list
= 0;
16671 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
16672 already there or not. */
16675 no_previous_def (tree function_name
)
16677 tree branch_island
;
16678 for (branch_island
= branch_island_list
;
16680 branch_island
= TREE_CHAIN (branch_island
))
16681 if (function_name
== BRANCH_ISLAND_FUNCTION_NAME (branch_island
))
16686 /* GET_PREV_LABEL gets the label name from the previous definition of
16690 get_prev_label (tree function_name
)
16692 tree branch_island
;
16693 for (branch_island
= branch_island_list
;
16695 branch_island
= TREE_CHAIN (branch_island
))
16696 if (function_name
== BRANCH_ISLAND_FUNCTION_NAME (branch_island
))
16697 return BRANCH_ISLAND_LABEL_NAME (branch_island
);
16701 /* INSN is either a function call or a millicode call. It may have an
16702 unconditional jump in its delay slot.
16704 CALL_DEST is the routine we are calling. */
16707 output_call (rtx insn
, rtx
*operands
, int dest_operand_number
, int cookie_operand_number
)
16709 static char buf
[256];
16710 if (GET_CODE (operands
[dest_operand_number
]) == SYMBOL_REF
16711 && (INTVAL (operands
[cookie_operand_number
]) & CALL_LONG
))
16714 tree funname
= get_identifier (XSTR (operands
[dest_operand_number
], 0));
16716 if (no_previous_def (funname
))
16718 int line_number
= 0;
16719 rtx label_rtx
= gen_label_rtx ();
16720 char *label_buf
, temp_buf
[256];
16721 ASM_GENERATE_INTERNAL_LABEL (temp_buf
, "L",
16722 CODE_LABEL_NUMBER (label_rtx
));
16723 label_buf
= temp_buf
[0] == '*' ? temp_buf
+ 1 : temp_buf
;
16724 labelname
= get_identifier (label_buf
);
16725 for (; insn
&& GET_CODE (insn
) != NOTE
; insn
= PREV_INSN (insn
));
16727 line_number
= NOTE_LINE_NUMBER (insn
);
16728 add_compiler_branch_island (labelname
, funname
, line_number
);
16731 labelname
= get_prev_label (funname
);
16733 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
16734 instruction will reach 'foo', otherwise link as 'bl L42'".
16735 "L42" should be a 'branch island', that will do a far jump to
16736 'foo'. Branch islands are generated in
16737 macho_branch_islands(). */
16738 sprintf (buf
, "jbsr %%z%d,%.246s",
16739 dest_operand_number
, IDENTIFIER_POINTER (labelname
));
16742 sprintf (buf
, "bl %%z%d", dest_operand_number
);
16746 #endif /* TARGET_MACHO */
16748 /* Generate PIC and indirect symbol stubs. */
16751 machopic_output_stub (FILE *file
, const char *symb
, const char *stub
)
16753 unsigned int length
;
16754 char *symbol_name
, *lazy_ptr_name
;
16755 char *local_label_0
;
16756 static int label
= 0;
16758 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
16759 symb
= (*targetm
.strip_name_encoding
) (symb
);
16762 length
= strlen (symb
);
16763 symbol_name
= alloca (length
+ 32);
16764 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name
, symb
, length
);
16766 lazy_ptr_name
= alloca (length
+ 32);
16767 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name
, symb
, length
);
16770 machopic_picsymbol_stub1_section ();
16772 machopic_symbol_stub1_section ();
16776 fprintf (file
, "\t.align 5\n");
16778 fprintf (file
, "%s:\n", stub
);
16779 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
16782 local_label_0
= alloca (sizeof("\"L0000000000$spb\""));
16783 sprintf (local_label_0
, "\"L%011d$spb\"", label
);
16785 fprintf (file
, "\tmflr r0\n");
16786 fprintf (file
, "\tbcl 20,31,%s\n", local_label_0
);
16787 fprintf (file
, "%s:\n\tmflr r11\n", local_label_0
);
16788 fprintf (file
, "\taddis r11,r11,ha16(%s-%s)\n",
16789 lazy_ptr_name
, local_label_0
);
16790 fprintf (file
, "\tmtlr r0\n");
16791 fprintf (file
, "\tlwzu r12,lo16(%s-%s)(r11)\n",
16792 lazy_ptr_name
, local_label_0
);
16793 fprintf (file
, "\tmtctr r12\n");
16794 fprintf (file
, "\tbctr\n");
16798 fprintf (file
, "\t.align 4\n");
16800 fprintf (file
, "%s:\n", stub
);
16801 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
16803 fprintf (file
, "\tlis r11,ha16(%s)\n", lazy_ptr_name
);
16804 fprintf (file
, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name
);
16805 fprintf (file
, "\tmtctr r12\n");
16806 fprintf (file
, "\tbctr\n");
16809 machopic_lazy_symbol_ptr_section ();
16810 fprintf (file
, "%s:\n", lazy_ptr_name
);
16811 fprintf (file
, "\t.indirect_symbol %s\n", symbol_name
);
16812 fprintf (file
, "%sdyld_stub_binding_helper\n",
16813 (TARGET_64BIT
? DOUBLE_INT_ASM_OP
: "\t.long\t"));
16816 /* Legitimize PIC addresses. If the address is already
16817 position-independent, we return ORIG. Newly generated
16818 position-independent addresses go into a reg. This is REG if non
16819 zero, otherwise we allocate register(s) as necessary. */
16821 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
16824 rs6000_machopic_legitimize_pic_address (rtx orig
, enum machine_mode mode
,
16829 if (reg
== NULL
&& ! reload_in_progress
&& ! reload_completed
)
16830 reg
= gen_reg_rtx (Pmode
);
16832 if (GET_CODE (orig
) == CONST
)
16834 if (GET_CODE (XEXP (orig
, 0)) == PLUS
16835 && XEXP (XEXP (orig
, 0), 0) == pic_offset_table_rtx
)
16838 if (GET_CODE (XEXP (orig
, 0)) == PLUS
)
16840 /* Use a different reg for the intermediate value, as
16841 it will be marked UNCHANGING. */
16842 rtx reg_temp
= no_new_pseudos
? reg
: gen_reg_rtx (Pmode
);
16845 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 0),
16848 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig
, 0), 1),
16854 if (GET_CODE (offset
) == CONST_INT
)
16856 if (SMALL_INT (offset
))
16857 return plus_constant (base
, INTVAL (offset
));
16858 else if (! reload_in_progress
&& ! reload_completed
)
16859 offset
= force_reg (Pmode
, offset
);
16862 rtx mem
= force_const_mem (Pmode
, orig
);
16863 return machopic_legitimize_pic_address (mem
, Pmode
, reg
);
16866 return gen_rtx_PLUS (Pmode
, base
, offset
);
16869 /* Fall back on generic machopic code. */
16870 return machopic_legitimize_pic_address (orig
, mode
, reg
);
16873 /* This is just a placeholder to make linking work without having to
16874 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
16875 ever needed for Darwin (not too likely!) this would have to get a
16876 real definition. */
16883 #endif /* TARGET_MACHO */
16886 static unsigned int
16887 rs6000_elf_section_type_flags (tree decl
, const char *name
, int reloc
)
16889 return default_section_type_flags_1 (decl
, name
, reloc
,
16890 flag_pic
|| DEFAULT_ABI
== ABI_AIX
);
16893 /* Record an element in the table of global constructors. SYMBOL is
16894 a SYMBOL_REF of the function to be called; PRIORITY is a number
16895 between 0 and MAX_INIT_PRIORITY.
16897 This differs from default_named_section_asm_out_constructor in
16898 that we have special handling for -mrelocatable. */
16901 rs6000_elf_asm_out_constructor (rtx symbol
, int priority
)
16903 const char *section
= ".ctors";
16906 if (priority
!= DEFAULT_INIT_PRIORITY
)
16908 sprintf (buf
, ".ctors.%.5u",
16909 /* Invert the numbering so the linker puts us in the proper
16910 order; constructors are run from right to left, and the
16911 linker sorts in increasing order. */
16912 MAX_INIT_PRIORITY
- priority
);
16916 named_section_flags (section
, SECTION_WRITE
);
16917 assemble_align (POINTER_SIZE
);
16919 if (TARGET_RELOCATABLE
)
16921 fputs ("\t.long (", asm_out_file
);
16922 output_addr_const (asm_out_file
, symbol
);
16923 fputs (")@fixup\n", asm_out_file
);
16926 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
16930 rs6000_elf_asm_out_destructor (rtx symbol
, int priority
)
16932 const char *section
= ".dtors";
16935 if (priority
!= DEFAULT_INIT_PRIORITY
)
16937 sprintf (buf
, ".dtors.%.5u",
16938 /* Invert the numbering so the linker puts us in the proper
16939 order; constructors are run from right to left, and the
16940 linker sorts in increasing order. */
16941 MAX_INIT_PRIORITY
- priority
);
16945 named_section_flags (section
, SECTION_WRITE
);
16946 assemble_align (POINTER_SIZE
);
16948 if (TARGET_RELOCATABLE
)
16950 fputs ("\t.long (", asm_out_file
);
16951 output_addr_const (asm_out_file
, symbol
);
16952 fputs (")@fixup\n", asm_out_file
);
16955 assemble_integer (symbol
, POINTER_SIZE
/ BITS_PER_UNIT
, POINTER_SIZE
, 1);
16959 rs6000_elf_declare_function_name (FILE *file
, const char *name
, tree decl
)
16963 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file
);
16964 ASM_OUTPUT_LABEL (file
, name
);
16965 fputs (DOUBLE_INT_ASM_OP
, file
);
16966 rs6000_output_function_entry (file
, name
);
16967 fputs (",.TOC.@tocbase,0\n\t.previous\n", file
);
16970 fputs ("\t.size\t", file
);
16971 assemble_name (file
, name
);
16972 fputs (",24\n\t.type\t.", file
);
16973 assemble_name (file
, name
);
16974 fputs (",@function\n", file
);
16975 if (TREE_PUBLIC (decl
) && ! DECL_WEAK (decl
))
16977 fputs ("\t.globl\t.", file
);
16978 assemble_name (file
, name
);
16983 ASM_OUTPUT_TYPE_DIRECTIVE (file
, name
, "function");
16984 ASM_DECLARE_RESULT (file
, DECL_RESULT (decl
));
16985 rs6000_output_function_entry (file
, name
);
16986 fputs (":\n", file
);
16990 if (TARGET_RELOCATABLE
16991 && (get_pool_size () != 0 || current_function_profile
)
16996 (*targetm
.asm_out
.internal_label
) (file
, "LCL", rs6000_pic_labelno
);
16998 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCTOC", 1);
16999 fprintf (file
, "\t.long ");
17000 assemble_name (file
, buf
);
17002 ASM_GENERATE_INTERNAL_LABEL (buf
, "LCF", rs6000_pic_labelno
);
17003 assemble_name (file
, buf
);
17007 ASM_OUTPUT_TYPE_DIRECTIVE (file
, name
, "function");
17008 ASM_DECLARE_RESULT (file
, DECL_RESULT (decl
));
17010 if (DEFAULT_ABI
== ABI_AIX
)
17012 const char *desc_name
, *orig_name
;
17014 orig_name
= (*targetm
.strip_name_encoding
) (name
);
17015 desc_name
= orig_name
;
17016 while (*desc_name
== '.')
17019 if (TREE_PUBLIC (decl
))
17020 fprintf (file
, "\t.globl %s\n", desc_name
);
17022 fprintf (file
, "%s\n", MINIMAL_TOC_SECTION_ASM_OP
);
17023 fprintf (file
, "%s:\n", desc_name
);
17024 fprintf (file
, "\t.long %s\n", orig_name
);
17025 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file
);
17026 if (DEFAULT_ABI
== ABI_AIX
)
17027 fputs ("\t.long 0\n", file
);
17028 fprintf (file
, "\t.previous\n");
17030 ASM_OUTPUT_LABEL (file
, name
);
17036 rs6000_xcoff_asm_globalize_label (FILE *stream
, const char *name
)
17038 fputs (GLOBAL_ASM_OP
, stream
);
17039 RS6000_OUTPUT_BASENAME (stream
, name
);
17040 putc ('\n', stream
);
17044 rs6000_xcoff_asm_named_section (const char *name
, unsigned int flags
,
17045 tree decl ATTRIBUTE_UNUSED
)
17048 static const char * const suffix
[3] = { "PR", "RO", "RW" };
17050 if (flags
& SECTION_CODE
)
17052 else if (flags
& SECTION_WRITE
)
17057 fprintf (asm_out_file
, "\t.csect %s%s[%s],%u\n",
17058 (flags
& SECTION_CODE
) ? "." : "",
17059 name
, suffix
[smclass
], flags
& SECTION_ENTSIZE
);
17063 rs6000_xcoff_select_section (tree decl
, int reloc
,
17064 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
17066 if (decl_readonly_section_1 (decl
, reloc
, 1))
17068 if (TREE_PUBLIC (decl
))
17069 read_only_data_section ();
17071 read_only_private_data_section ();
17075 if (TREE_PUBLIC (decl
))
17078 private_data_section ();
17083 rs6000_xcoff_unique_section (tree decl
, int reloc ATTRIBUTE_UNUSED
)
17087 /* Use select_section for private and uninitialized data. */
17088 if (!TREE_PUBLIC (decl
)
17089 || DECL_COMMON (decl
)
17090 || DECL_INITIAL (decl
) == NULL_TREE
17091 || DECL_INITIAL (decl
) == error_mark_node
17092 || (flag_zero_initialized_in_bss
17093 && initializer_zerop (DECL_INITIAL (decl
))))
17096 name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
17097 name
= (*targetm
.strip_name_encoding
) (name
);
17098 DECL_SECTION_NAME (decl
) = build_string (strlen (name
), name
);
17101 /* Select section for constant in constant pool.
17103 On RS/6000, all constants are in the private read-only data area.
17104 However, if this is being placed in the TOC it must be output as a
17108 rs6000_xcoff_select_rtx_section (enum machine_mode mode
, rtx x
,
17109 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
17111 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x
, mode
))
17114 read_only_private_data_section ();
17117 /* Remove any trailing [DS] or the like from the symbol name. */
/* Remove any trailing [DS] or the like from the symbol name.  */

static const char *
rs6000_xcoff_strip_name_encoding (const char *name)
{
  size_t len;
  if (*name == '*')
    name++;
  len = strlen (name);
  /* "[XX]" is four characters; drop it along with the brackets.  */
  if (name[len - 1] == ']')
    return ggc_alloc_string (name, len - 4);
  else
    return name;
}
17132 /* Section attributes. AIX is always PIC. */
17134 static unsigned int
17135 rs6000_xcoff_section_type_flags (tree decl
, const char *name
, int reloc
)
17137 unsigned int align
;
17138 unsigned int flags
= default_section_type_flags_1 (decl
, name
, reloc
, 1);
17140 /* Align to at least UNIT size. */
17141 if (flags
& SECTION_CODE
)
17142 align
= MIN_UNITS_PER_WORD
;
17144 /* Increase alignment of large objects if not already stricter. */
17145 align
= MAX ((DECL_ALIGN (decl
) / BITS_PER_UNIT
),
17146 int_size_in_bytes (TREE_TYPE (decl
)) > MIN_UNITS_PER_WORD
17147 ? UNITS_PER_FP_WORD
: MIN_UNITS_PER_WORD
);
17149 return flags
| (exact_log2 (align
) & SECTION_ENTSIZE
);
17152 /* Output at beginning of assembler file.
17154 Initialize the section names for the RS/6000 at this point.
17156 Specify filename, including full path, to assembler.
17158 We want to go into the TOC section so at least one .toc will be emitted.
17159 Also, in order to output proper .bs/.es pairs, we need at least one static
17160 [RW] section emitted.
17162 Finally, declare mcount when profiling to make the assembler happy. */
17165 rs6000_xcoff_file_start (void)
17167 rs6000_gen_section_name (&xcoff_bss_section_name
,
17168 main_input_filename
, ".bss_");
17169 rs6000_gen_section_name (&xcoff_private_data_section_name
,
17170 main_input_filename
, ".rw_");
17171 rs6000_gen_section_name (&xcoff_read_only_section_name
,
17172 main_input_filename
, ".ro_");
17174 fputs ("\t.file\t", asm_out_file
);
17175 output_quoted_string (asm_out_file
, main_input_filename
);
17176 fputc ('\n', asm_out_file
);
17178 if (write_symbols
!= NO_DEBUG
)
17179 private_data_section ();
17182 fprintf (asm_out_file
, "\t.extern %s\n", RS6000_MCOUNT
);
17183 rs6000_file_start ();
17186 /* Output at end of assembler file.
17187 On the RS/6000, referencing data should automatically pull in text. */
17190 rs6000_xcoff_file_end (void)
17193 fputs ("_section_.text:\n", asm_out_file
);
17195 fputs (TARGET_32BIT
17196 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
17199 #endif /* TARGET_XCOFF */
17202 /* Cross-module name binding. Darwin does not support overriding
17203 functions at dynamic-link time. */
17206 rs6000_binds_local_p (tree decl
)
17208 return default_binds_local_p_1 (decl
, 0);
17212 /* Compute a (partial) cost for rtx X. Return true if the complete
17213 cost has been computed, and false if subexpressions should be
17214 scanned. In either case, *TOTAL contains the cost result. */
17217 rs6000_rtx_costs (rtx x
, int code
, int outer_code
, int *total
)
17219 enum machine_mode mode
= GET_MODE (x
);
17223 /* On the RS/6000, if it is valid in the insn, it is free. */
17225 if (((outer_code
== SET
17226 || outer_code
== PLUS
17227 || outer_code
== MINUS
)
17228 && (CONST_OK_FOR_LETTER_P (INTVAL (x
), 'I')
17229 || CONST_OK_FOR_LETTER_P (INTVAL (x
), 'L')))
17230 || ((outer_code
== IOR
|| outer_code
== XOR
)
17231 && (CONST_OK_FOR_LETTER_P (INTVAL (x
), 'K')
17232 || CONST_OK_FOR_LETTER_P (INTVAL (x
), 'L')))
17233 || ((outer_code
== DIV
|| outer_code
== UDIV
17234 || outer_code
== MOD
|| outer_code
== UMOD
)
17235 && exact_log2 (INTVAL (x
)) >= 0)
17236 || (outer_code
== AND
17237 && (CONST_OK_FOR_LETTER_P (INTVAL (x
), 'K')
17238 || CONST_OK_FOR_LETTER_P (INTVAL (x
), 'L')
17239 || mask_operand (x
, VOIDmode
)))
17240 || outer_code
== ASHIFT
17241 || outer_code
== ASHIFTRT
17242 || outer_code
== LSHIFTRT
17243 || outer_code
== ROTATE
17244 || outer_code
== ROTATERT
17245 || outer_code
== ZERO_EXTRACT
17246 || (outer_code
== MULT
17247 && CONST_OK_FOR_LETTER_P (INTVAL (x
), 'I'))
17248 || (outer_code
== COMPARE
17249 && (CONST_OK_FOR_LETTER_P (INTVAL (x
), 'I')
17250 || CONST_OK_FOR_LETTER_P (INTVAL (x
), 'K'))))
17255 else if ((outer_code
== PLUS
17256 && reg_or_add_cint64_operand (x
, VOIDmode
))
17257 || (outer_code
== MINUS
17258 && reg_or_sub_cint64_operand (x
, VOIDmode
))
17259 || ((outer_code
== SET
17260 || outer_code
== IOR
17261 || outer_code
== XOR
)
17263 & ~ (unsigned HOST_WIDE_INT
) 0xffffffff) == 0))
17265 *total
= COSTS_N_INSNS (1);
17272 && ((outer_code
== AND
17273 && (CONST_OK_FOR_LETTER_P (INTVAL (x
), 'K')
17274 || CONST_OK_FOR_LETTER_P (INTVAL (x
), 'L')
17275 || mask64_operand (x
, DImode
)))
17276 || ((outer_code
== IOR
|| outer_code
== XOR
)
17277 && CONST_DOUBLE_HIGH (x
) == 0
17278 && (CONST_DOUBLE_LOW (x
)
17279 & ~ (unsigned HOST_WIDE_INT
) 0xffff) == 0)))
17284 else if (mode
== DImode
17285 && (outer_code
== SET
17286 || outer_code
== IOR
17287 || outer_code
== XOR
)
17288 && CONST_DOUBLE_HIGH (x
) == 0)
17290 *total
= COSTS_N_INSNS (1);
17299 /* When optimizing for size, MEM should be slightly more expensive
17300 than generating address, e.g., (plus (reg) (const)).
17301 L1 cache latency is about two instructions. */
17302 *total
= optimize_size
? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (2);
17310 if (mode
== DFmode
)
17312 if (GET_CODE (XEXP (x
, 0)) == MULT
)
17314 /* FNMA accounted in outer NEG. */
17315 if (outer_code
== NEG
)
17316 *total
= rs6000_cost
->dmul
- rs6000_cost
->fp
;
17318 *total
= rs6000_cost
->dmul
;
17321 *total
= rs6000_cost
->fp
;
17323 else if (mode
== SFmode
)
17325 /* FNMA accounted in outer NEG. */
17326 if (outer_code
== NEG
&& GET_CODE (XEXP (x
, 0)) == MULT
)
17329 *total
= rs6000_cost
->fp
;
17331 else if (GET_CODE (XEXP (x
, 0)) == MULT
)
17333 /* The rs6000 doesn't have shift-and-add instructions. */
17334 rs6000_rtx_costs (XEXP (x
, 0), MULT
, PLUS
, total
);
17335 *total
+= COSTS_N_INSNS (1);
17338 *total
= COSTS_N_INSNS (1);
17342 if (mode
== DFmode
)
17344 if (GET_CODE (XEXP (x
, 0)) == MULT
)
17346 /* FNMA accounted in outer NEG. */
17347 if (outer_code
== NEG
)
17350 *total
= rs6000_cost
->dmul
;
17353 *total
= rs6000_cost
->fp
;
17355 else if (mode
== SFmode
)
17357 /* FNMA accounted in outer NEG. */
17358 if (outer_code
== NEG
&& GET_CODE (XEXP (x
, 0)) == MULT
)
17361 *total
= rs6000_cost
->fp
;
17363 else if (GET_CODE (XEXP (x
, 0)) == MULT
)
17365 /* The rs6000 doesn't have shift-and-sub instructions. */
17366 rs6000_rtx_costs (XEXP (x
, 0), MULT
, MINUS
, total
);
17367 *total
+= COSTS_N_INSNS (1);
17370 *total
= COSTS_N_INSNS (1);
17374 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
17376 if (INTVAL (XEXP (x
, 1)) >= -256
17377 && INTVAL (XEXP (x
, 1)) <= 255)
17378 *total
= rs6000_cost
->mulsi_const9
;
17380 *total
= rs6000_cost
->mulsi_const
;
17382 /* FMA accounted in outer PLUS/MINUS. */
17383 else if ((mode
== DFmode
|| mode
== SFmode
)
17384 && (outer_code
== PLUS
|| outer_code
== MINUS
))
17386 else if (mode
== DFmode
)
17387 *total
= rs6000_cost
->dmul
;
17388 else if (mode
== SFmode
)
17389 *total
= rs6000_cost
->fp
;
17390 else if (mode
== DImode
)
17391 *total
= rs6000_cost
->muldi
;
17393 *total
= rs6000_cost
->mulsi
;
17398 if (FLOAT_MODE_P (mode
))
17400 *total
= mode
== DFmode
? rs6000_cost
->ddiv
17401 : rs6000_cost
->sdiv
;
17408 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
17409 && exact_log2 (INTVAL (XEXP (x
, 1))) >= 0)
17411 if (code
== DIV
|| code
== MOD
)
17413 *total
= COSTS_N_INSNS (2);
17416 *total
= COSTS_N_INSNS (1);
17420 if (GET_MODE (XEXP (x
, 1)) == DImode
)
17421 *total
= rs6000_cost
->divdi
;
17423 *total
= rs6000_cost
->divsi
;
17425 /* Add in shift and subtract for MOD. */
17426 if (code
== MOD
|| code
== UMOD
)
17427 *total
+= COSTS_N_INSNS (2);
17431 *total
= COSTS_N_INSNS (4);
17435 if (outer_code
== AND
|| outer_code
== IOR
|| outer_code
== XOR
)
17446 *total
= COSTS_N_INSNS (1);
17454 /* Handle mul_highpart. */
17455 if (outer_code
== TRUNCATE
17456 && GET_CODE (XEXP (x
, 0)) == MULT
)
17458 if (mode
== DImode
)
17459 *total
= rs6000_cost
->muldi
;
17461 *total
= rs6000_cost
->mulsi
;
17464 else if (outer_code
== AND
)
17467 *total
= COSTS_N_INSNS (1);
17472 if (GET_CODE (XEXP (x
, 0)) == MEM
)
17475 *total
= COSTS_N_INSNS (1);
17481 if (!FLOAT_MODE_P (mode
))
17483 *total
= COSTS_N_INSNS (1);
17489 case UNSIGNED_FLOAT
:
17493 case FLOAT_TRUNCATE
:
17494 *total
= rs6000_cost
->fp
;
17498 switch (XINT (x
, 1))
17501 *total
= rs6000_cost
->fp
;
17513 *total
= COSTS_N_INSNS (1);
17516 else if (FLOAT_MODE_P (mode
)
17517 && TARGET_PPC_GFXOPT
&& TARGET_HARD_FLOAT
&& TARGET_FPRS
)
17519 *total
= rs6000_cost
->fp
;
17532 /* A C expression returning the cost of moving data from a register of class
17533 CLASS1 to one of CLASS2. */
17536 rs6000_register_move_cost (enum machine_mode mode
,
17537 enum reg_class from
, enum reg_class to
)
17539 /* Moves from/to GENERAL_REGS. */
17540 if (reg_classes_intersect_p (to
, GENERAL_REGS
)
17541 || reg_classes_intersect_p (from
, GENERAL_REGS
))
17543 if (! reg_classes_intersect_p (to
, GENERAL_REGS
))
17546 if (from
== FLOAT_REGS
|| from
== ALTIVEC_REGS
)
17547 return (rs6000_memory_move_cost (mode
, from
, 0)
17548 + rs6000_memory_move_cost (mode
, GENERAL_REGS
, 0));
17550 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift.... */
17551 else if (from
== CR_REGS
)
17555 /* A move will cost one instruction per GPR moved. */
17556 return 2 * HARD_REGNO_NREGS (0, mode
);
17559 /* Moving between two similar registers is just one instruction. */
17560 else if (reg_classes_intersect_p (to
, from
))
17561 return mode
== TFmode
? 4 : 2;
17563 /* Everything else has to go through GENERAL_REGS. */
17565 return (rs6000_register_move_cost (mode
, GENERAL_REGS
, to
)
17566 + rs6000_register_move_cost (mode
, from
, GENERAL_REGS
));
17569 /* A C expressions returning the cost of moving data of MODE from a register to
17573 rs6000_memory_move_cost (enum machine_mode mode
, enum reg_class
class,
17574 int in ATTRIBUTE_UNUSED
)
17576 if (reg_classes_intersect_p (class, GENERAL_REGS
))
17577 return 4 * HARD_REGNO_NREGS (0, mode
);
17578 else if (reg_classes_intersect_p (class, FLOAT_REGS
))
17579 return 4 * HARD_REGNO_NREGS (32, mode
);
17580 else if (reg_classes_intersect_p (class, ALTIVEC_REGS
))
17581 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO
, mode
);
17583 return 4 + rs6000_register_move_cost (mode
, class, GENERAL_REGS
);
17586 /* Return an RTX representing where to find the function value of a
17587 function returning MODE. */
17589 rs6000_complex_function_value (enum machine_mode mode
)
17591 unsigned int regno
;
17593 enum machine_mode inner
= GET_MODE_INNER (mode
);
17594 unsigned int inner_bytes
= GET_MODE_SIZE (inner
);
17596 if (FLOAT_MODE_P (mode
) && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
17597 regno
= FP_ARG_RETURN
;
17600 regno
= GP_ARG_RETURN
;
17602 /* 32-bit is OK since it'll go in r3/r4. */
17603 if (TARGET_32BIT
&& inner_bytes
>= 4)
17604 return gen_rtx_REG (mode
, regno
);
17607 if (inner_bytes
>= 8)
17608 return gen_rtx_REG (mode
, regno
);
17610 r1
= gen_rtx_EXPR_LIST (inner
, gen_rtx_REG (inner
, regno
),
17612 r2
= gen_rtx_EXPR_LIST (inner
, gen_rtx_REG (inner
, regno
+ 1),
17613 GEN_INT (inner_bytes
));
17614 return gen_rtx_PARALLEL (mode
, gen_rtvec (2, r1
, r2
));
17617 /* Define how to find the value returned by a function.
17618 VALTYPE is the data type of the value (as a tree).
17619 If the precise function being called is known, FUNC is its FUNCTION_DECL;
17620 otherwise, FUNC is 0.
17622 On the SPE, both FPs and vectors are returned in r3.
17624 On RS/6000 an integer value is in r3 and a floating-point value is in
17625 fp1, unless -msoft-float. */
17628 rs6000_function_value (tree valtype
, tree func ATTRIBUTE_UNUSED
)
17630 enum machine_mode mode
;
17631 unsigned int regno
;
17633 if (TARGET_32BIT
&& TARGET_POWERPC64
&& TYPE_MODE (valtype
) == DImode
)
17635 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
17636 return gen_rtx_PARALLEL (DImode
,
17638 gen_rtx_EXPR_LIST (VOIDmode
,
17639 gen_rtx_REG (SImode
, GP_ARG_RETURN
),
17641 gen_rtx_EXPR_LIST (VOIDmode
,
17642 gen_rtx_REG (SImode
,
17643 GP_ARG_RETURN
+ 1),
17647 if ((INTEGRAL_TYPE_P (valtype
)
17648 && TYPE_PRECISION (valtype
) < BITS_PER_WORD
)
17649 || POINTER_TYPE_P (valtype
))
17650 mode
= TARGET_32BIT
? SImode
: DImode
;
17652 mode
= TYPE_MODE (valtype
);
17654 if (SCALAR_FLOAT_TYPE_P (valtype
) && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
17655 regno
= FP_ARG_RETURN
;
17656 else if (TREE_CODE (valtype
) == COMPLEX_TYPE
17657 && targetm
.calls
.split_complex_arg
)
17658 return rs6000_complex_function_value (mode
);
17659 else if (TREE_CODE (valtype
) == VECTOR_TYPE
17660 && TARGET_ALTIVEC
&& TARGET_ALTIVEC_ABI
17661 && ALTIVEC_VECTOR_MODE(mode
))
17662 regno
= ALTIVEC_ARG_RETURN
;
17664 regno
= GP_ARG_RETURN
;
17666 return gen_rtx_REG (mode
, regno
);
17669 /* Define how to find the value returned by a library function
17670 assuming the value has mode MODE. */
17672 rs6000_libcall_value (enum machine_mode mode
)
17674 unsigned int regno
;
17676 if (TARGET_32BIT
&& TARGET_POWERPC64
&& mode
== DImode
)
17678 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
17679 return gen_rtx_PARALLEL (DImode
,
17681 gen_rtx_EXPR_LIST (VOIDmode
,
17682 gen_rtx_REG (SImode
, GP_ARG_RETURN
),
17684 gen_rtx_EXPR_LIST (VOIDmode
,
17685 gen_rtx_REG (SImode
,
17686 GP_ARG_RETURN
+ 1),
17690 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
17691 && TARGET_HARD_FLOAT
&& TARGET_FPRS
)
17692 regno
= FP_ARG_RETURN
;
17693 else if (ALTIVEC_VECTOR_MODE (mode
)
17694 && TARGET_ALTIVEC
&& TARGET_ALTIVEC_ABI
)
17695 regno
= ALTIVEC_ARG_RETURN
;
17696 else if (COMPLEX_MODE_P (mode
) && targetm
.calls
.split_complex_arg
)
17697 return rs6000_complex_function_value (mode
);
17699 regno
= GP_ARG_RETURN
;
17701 return gen_rtx_REG (mode
, regno
);
17704 /* Define the offset between two registers, FROM to be eliminated and its
17705 replacement TO, at the start of a routine. */
17707 rs6000_initial_elimination_offset (int from
, int to
)
17709 rs6000_stack_t
*info
= rs6000_stack_info ();
17710 HOST_WIDE_INT offset
;
17712 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
17713 offset
= info
->push_p
? 0 : -info
->total_size
;
17714 else if (from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
17715 offset
= info
->total_size
;
17716 else if (from
== ARG_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
17717 offset
= info
->push_p
? info
->total_size
: 0;
17718 else if (from
== RS6000_PIC_OFFSET_TABLE_REGNUM
)
17726 /* Return true if TYPE is of type __ev64_opaque__. */
17729 is_ev64_opaque_type (tree type
)
17732 && (type
== opaque_V2SI_type_node
17733 || type
== opaque_V2SF_type_node
17734 || type
== opaque_p_V2SI_type_node
));
17738 rs6000_dwarf_register_span (rtx reg
)
17742 if (!TARGET_SPE
|| !SPE_VECTOR_MODE (GET_MODE (reg
)))
17745 regno
= REGNO (reg
);
17747 /* The duality of the SPE register size wreaks all kinds of havoc.
17748 This is a way of distinguishing r0 in 32-bits from r0 in
17751 gen_rtx_PARALLEL (VOIDmode
,
17754 gen_rtx_REG (SImode
, regno
+ 1200),
17755 gen_rtx_REG (SImode
, regno
))
17757 gen_rtx_REG (SImode
, regno
),
17758 gen_rtx_REG (SImode
, regno
+ 1200)));
17761 /* Map internal gcc register numbers to DWARF2 register numbers. */
17764 rs6000_dbx_register_number (unsigned int regno
)
17766 if (regno
<= 63 || write_symbols
!= DWARF2_DEBUG
)
17768 if (regno
== MQ_REGNO
)
17770 if (regno
== LINK_REGISTER_REGNUM
)
17772 if (regno
== COUNT_REGISTER_REGNUM
)
17774 if (CR_REGNO_P (regno
))
17775 return regno
- CR0_REGNO
+ 86;
17776 if (regno
== XER_REGNO
)
17778 if (ALTIVEC_REGNO_P (regno
))
17779 return regno
- FIRST_ALTIVEC_REGNO
+ 1124;
17780 if (regno
== VRSAVE_REGNO
)
17782 if (regno
== VSCR_REGNO
)
17784 if (regno
== SPE_ACC_REGNO
)
17786 if (regno
== SPEFSCR_REGNO
)
17788 /* SPE high reg number. We get these values of regno from
17789 rs6000_dwarf_register_span. */
17790 if (regno
>= 1200 && regno
< 1232)
17796 /* target hook eh_return_filter_mode */
17797 static enum machine_mode
17798 rs6000_eh_return_filter_mode (void)
17800 return TARGET_32BIT
? SImode
: word_mode
;
17803 /* Target hook for vector_mode_supported_p. */
17805 rs6000_vector_mode_supported_p (enum machine_mode mode
)
17808 if (TARGET_SPE
&& SPE_VECTOR_MODE (mode
))
17811 else if (TARGET_ALTIVEC
&& ALTIVEC_VECTOR_MODE (mode
))
17818 #include "gt-rs6000.h"