+2011-06-16 Joern Rennecke <joern.rennecke@embecosm.com>
+
+ PR middle-end/46500
+ * doc/tm.texi.in: Update Copyright date.
+ * doc/tm.texi: Regenerate.
+ * targhooks.c (default_setup_incoming_varargs): Replace
+ CUMULATIVE_ARGS* argument type with cumulative_args_t.
+ (default_pretend_outgoing_varargs_named): Likewise.
+ (hook_pass_by_reference_must_pass_in_stack): Likewise.
+ (hook_callee_copies_named): Likewise.
+ (default_function_arg_advance): Likewise.
+ (default_function_arg): Likewise.
+ (default_function_incoming_arg): Likewise.
+ (hook_bool_CUMULATIVE_ARGS_false): Likewise.
+ (hook_bool_CUMULATIVE_ARGS_true): Likewise.
+ (hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false): Likewise.
+ (hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true): Likewise.
+ (hook_int_CUMULATIVE_ARGS_mode_tree_bool_0): Likewise.
+ * targhooks.h (default_setup_incoming_varargs): Likewise.
+ (default_pretend_outgoing_varargs_named): Likewise.
+ (hook_pass_by_reference_must_pass_in_stack): Likewise.
+ (hook_callee_copies_named): Likewise.
+ (default_function_arg_advance): Likewise.
+ (default_function_arg): Likewise.
+ (default_function_incoming_arg): Likewise.
+ (hook_bool_CUMULATIVE_ARGS_false): Likewise.
+ (hook_bool_CUMULATIVE_ARGS_true): Likewise.
+ (hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false): Likewise.
+ (hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true): Likewise.
+ (hook_int_CUMULATIVE_ARGS_mode_tree_bool_0): Likewise.
+ * target.def (pass_by_reference): Likewise.
+ (setup_incoming_varargs, strict_argument_naming): Likewise.
+ (pretend_outgoing_varargs_named, callee_copies): Likewise.
+ (arg_partial_bytes, function_arg_advance, function_arg): Likewise.
+ (function_incoming_arg): Likewise.
+ * target.h: Don't include "tm.h".
+ (cumulative_args_t): New typedef.
+ [GCC_TM_H] (get_cumulative_args): New static inline function.
+ [GCC_TM_H] (pack_cumulative_args): Likewise.
+ * config/alpha/alpha.c (alpha_function_arg): Replace CUMULATIVE_ARGS*
+ argument type with cumulative_args_t.
+ (alpha_function_arg_advance, alpha_arg_partial_bytes): Likewise.
+ (alpha_pass_by_reference, alpha_setup_incoming_varargs): Likewise.
+ * config/frv/frv.c (frv_setup_incoming_varargs): Likewise.
+ (frv_arg_partial_bytes, frv_function_arg): Likewise.
+ (frv_function_incoming_arg, frv_function_arg_advance): Likewise.
+ (frv_function_arg_1): Likewise.
+ * config/s390/s390.c (s390_pass_by_reference): Likewise.
+ (s390_function_arg_advance, s390_function_arg): Likewise.
+ * config/m32c/m32c.c (m32c_function_arg): Likewise.
+ (m32c_pass_by_reference, m32c_function_arg_advance): Likewise.
+ (m32c_strict_argument_naming): Likewise.
+ * config/spu/spu.c (spu_pass_by_reference, spu_function_arg): Likewise.
+ (spu_function_arg_advance): Likewise.
+ (spu_setup_incoming_varargs): Likewise. Make static.
+ * config/spu/spu-protos.h (spu_setup_incoming_varargs):
+ Remove prototype.
+ * config/sparc/sparc.c (sparc_strict_argument_naming): Replace
+ CUMULATIVE_ARGS* argument type with cumulative_args_t.
+ (sparc_pass_by_reference, sparc_function_arg_advance): Likewise.
+ (sparc_function_arg, sparc_function_incoming_arg): Likewise.
+ (sparc_arg_partial_bytes, sparc_function_arg_1): Likewise.
+ * config/mep/mep.c (mep_setup_incoming_varargs): Likewise.
+ (mep_pass_by_reference, mep_function_arg): Likewise.
+ (mep_function_arg_advance): Likewise.
+ * config/m32r/m32r.c (m32r_setup_incoming_varargs): Likewise.
+ (m32r_pass_by_reference, m32r_arg_partial_bytes): Likewise.
+ (m32r_function_arg, m32r_function_arg_advance): Likewise.
+ * config/rx/rx.c (rx_function_arg, rx_function_arg_advance): Likewise.
+ * config/i386/i386.c (ix86_function_arg_advance): Likewise.
+ (ix86_function_arg, ix86_pass_by_reference): Likewise.
+ (ix86_setup_incoming_varargs): Likewise.
+ * config/sh/sh.c (sh_setup_incoming_varargs): Likewise.
+ (sh_strict_argument_naming): Likewise.
+ (sh_pretend_outgoing_varargs_named, sh_pass_by_reference): Likewise.
+ (sh_callee_copies, sh_arg_partial_bytes): Likewise.
+ (sh_function_arg_advance, sh_function_arg): Likewise.
+ * config/pdp11/pdp11.c (pdp11_function_arg): Likewise.
+ (pdp11_function_arg_advance): Likewise.
+ * config/microblaze/microblaze.c (microblaze_function_arg_advance):
+ Likewise.
+ (microblaze_function_arg, function_arg_partial_bytes): Likewise.
+ * config/avr/avr.c (avr_function_arg): Likewise.
+ (avr_function_arg_advance): Likewise.
+ * config/xtensa/xtensa.c (xtensa_function_arg_advance): Likewise.
+ (xtensa_function_arg, xtensa_function_incoming_arg): Likewise.
+ (xtensa_function_arg_1): Likewise.
+ * config/stormy16/stormy16.c (xstormy16_function_arg_advance): Likewise.
+ (xstormy16_function_arg): Likewise.
+ * config/fr30/fr30.c (fr30_setup_incoming_varargs): Likewise.
+ (fr30_arg_partial_bytes, fr30_function_arg): Likewise.
+ (fr30_function_arg_advance): Likewise.
+ * config/lm32/lm32.c (lm32_setup_incoming_varargs): Likewise.
+ (lm32_function_arg, lm32_function_arg_advance): Likewise.
+ * config/moxie/moxie.c (moxie_setup_incoming_varargs): Likewise.
+ (moxie_function_arg, moxie_function_arg_advance): Likewise.
+ (moxie_pass_by_reference, moxie_arg_partial_bytes): Likewise.
+ * config/cris/cris.c (cris_setup_incoming_varargs): Likewise.
+ (cris_pass_by_reference, cris_arg_partial_bytes): Likewise.
+ (cris_function_arg, cris_function_incoming_arg): Likewise.
+ (cris_function_arg_advance, cris_function_arg_1): Likewise.
+ * config/iq2000/iq2000.c (iq2000_setup_incoming_varargs): Likewise.
+ (iq2000_pass_by_reference, iq2000_arg_partial_bytes): Likewise.
+ (iq2000_function_arg, iq2000_function_arg_advance): Likewise.
+ * config/mn10300/mn10300.c (mn10300_pass_by_reference): Likewise.
+ (mn10300_function_arg, mn10300_function_arg_advance): Likewise.
+ (mn10300_arg_partial_bytes): Likewise.
+ * config/ia64/ia64.c (ia64_setup_incoming_varargs): Likewise.
+ (ia64_arg_partial_bytes, ia64_function_arg): Likewise.
+ (ia64_function_incoming_arg, ia64_function_arg_advance): Likewise.
+ (ia64_function_arg_1): Likewise.
+ * config/m68k/m68k.c (m68k_function_arg_advance): Likewise.
+ (m68k_function_arg): Likewise.
+ * config/rs6000/rs6000.c (rs6000_function_arg_advance): Likewise.
+ (rs6000_function_arg, setup_incoming_varargs): Likewise.
+ (rs6000_pass_by_reference, rs6000_arg_partial_bytes): Likewise.
+ * config/picochip/picochip.c (picochip_arg_partial_bytes): Likewise.
+ (picochip_function_arg, picochip_incoming_function_arg): Likewise.
+ (picochip_arg_advance): Likewise.
+ * config/mcore/mcore.c (mcore_setup_incoming_varargs): Likewise.
+ (mcore_arg_partial_bytes, mcore_function_arg): Likewise.
+ (mcore_function_arg_advance): Likewise.
+ * config/score/score.c (score_pass_by_reference): Likewise.
+ (score_function_arg_advance): Likewise.
+ (score_arg_partial_bytes): Likewise. Make static.
+ * config/score/score-protos.h (score_arg_partial_bytes): Don't declare.
+ * config/arm/arm.c (arm_arg_partial_bytes): Replace
+ CUMULATIVE_ARGS* argument type with cumulative_args_t.
+ (arm_function_arg, arm_function_arg_advance): Likewise.
+ (arm_setup_incoming_varargs, arm_pass_by_reference): Likewise.
+ * config/pa/pa.c (pa_pass_by_reference): Likewise.
+ (pa_arg_partial_bytes, pa_function_arg_advance): Likewise.
+ (pa_function_arg): Likewise.
+ * config/mips/mips.c (mips_strict_argument_naming): Likewise.
+ (mips_function_arg, mips_function_arg_advance): Likewise.
+ (mips_arg_partial_bytes, mips_pass_by_reference): Likewise.
+ (mips_callee_copies, mips_setup_incoming_varargs): Likewise.
+ * config/vax/vax.c (vax_function_arg): Likewise.
+ (vax_function_arg_advance): Likewise.
+ * config/h8300/h8300.c (h8300_function_arg): Likewise.
+ (h8300_function_arg_advance): Likewise.
+ * config/v850/v850.c (v850_pass_by_reference): Likewise.
+ (v850_strict_argument_naming, v850_function_arg): Likewise.
+ (v850_arg_partial_bytes, v850_function_arg_advance): Likewise.
+ (v850_setup_incoming_varargs): Likewise.
+ * config/mmix/mmix.c (mmix_setup_incoming_varargs): Likewise.
+ (mmix_function_arg_advance, mmix_function_incoming_arg): Likewise.
+ (mmix_function_arg, mmix_pass_by_reference): Likewise.
+ (mmix_function_arg_1): Replace const CUMULATIVE_ARGS* argument type
+ with const void *.
+ * config/bfin/bfin.c (setup_incoming_varargs): Replace
+ CUMULATIVE_ARGS* argument type with cumulative_args_t.
+ (bfin_function_arg_advance, bfin_function_arg): Likewise.
+ (bfin_arg_partial_bytes, bfin_pass_by_reference): Likewise.
+ * calls.c (emit_call_1): Change type of args_so_far to
+ cumulative_args_t. Changed all callers.
+ (initialize_argument_information): Likewise.
+ (expand_call, emit_library_call_value_1): Use pack_cumulative_args.
+ * dse.c (get_call_args): Likewise.
+ * expr.c (block_move_libcall_safe_for_call_parm): Likewise.
+ * function.c (pass_by_reference, reference_callee_copied): Likewise.
+ (struct assign_parm_data_all): Rename args_so_far to args_so_far_v.
+ New member args_so_far. Changed all users.
+ * var-tracking.c (prepare_call_arguments): Use pack_cumulative_args.
+ * config/iq2000/iq2000.c (iq2000_expand_prologue): Likewise.
+ * config/mips/mips.c (mips_output_args_xfer): Likewise.
+ * config/s390/s390.c (s390_call_saved_register_used): Likewise.
+ * config/sh/sh.c (sh_output_mi_thunk): Likewise.
+ * config/microblaze/microblaze.c (microblaze_expand_prologue): Likewise.
+ * config/m32r/m32r.c (m32r_return_in_memory): Adjust for changed
+ m32r_pass_by_reference.
+
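[Note: the target.h additions named in the entry above (the cumulative_args_t typedef and the GCC_TM_H-guarded get_cumulative_args/pack_cumulative_args helpers) are not reproduced in the hunks below. The following is only a rough sketch of their shape, inferred from how the rest of the patch uses them; it assumes the handle is a plain type-erased pointer wrapper, and omits any extra consistency checking the committed header may do (the real typedef could equally be a bare void * or carry a checking field).

/* Sketch only -- not the committed target.h code.  */
typedef struct { void *p; } cumulative_args_t;

#ifdef GCC_TM_H
/* CUMULATIVE_ARGS is a target-specific type, so these helpers are only
   usable in files that see tm.h.  */

static inline CUMULATIVE_ARGS *
get_cumulative_args (cumulative_args_t arg)
{
  return (CUMULATIVE_ARGS *) arg.p;
}

static inline cumulative_args_t
pack_cumulative_args (CUMULATIVE_ARGS *arg)
{
  cumulative_args_t ret;

  ret.p = (void *) arg;
  return ret;
}
#endif /* GCC_TM_H */
]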
2011-06-16 Ira Rosen <ira.rosen@linaro.org>
* tree-vectorizer.h (vect_recog_func_ptr): Change the first
+2011-06-16 Joern Rennecke <joern.rennecke@embecosm.com>
+
+ PR middle-end/46500
+ * gcc-interface/decl.c (gnat_to_gnu_param): Use pack_cumulative_args.
+
2011-06-14 Joseph Myers <joseph@codesourcery.com>
* gcc-interface/Make-lang.in (gnatbind$(exeext)): Use ggc-none.o.
passed by reference. Pass them by explicit reference; this will
generate more debuggable code at -O0. */
if (TYPE_IS_FAT_POINTER_P (gnu_param_type)
- && targetm.calls.pass_by_reference (NULL,
+ && targetm.calls.pass_by_reference (pack_cumulative_args (NULL),
TYPE_MODE (gnu_param_type),
gnu_param_type,
true))
static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
HOST_WIDE_INT, rtx, rtx, int, rtx, int,
- CUMULATIVE_ARGS *);
+ cumulative_args_t);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static void initialize_argument_information (int, struct arg_data *,
struct args_size *, int,
tree, tree,
- tree, tree, CUMULATIVE_ARGS *, int,
+ tree, tree, cumulative_args_t, int,
rtx *, int *, int *, int *,
bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
- CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
+ cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
rtx call_insn, call, funmem;
= targetm.calls.return_pops_args (fndecl, funtype, stack_size);
#ifdef CALL_POPS_ARGS
- n_popped += CALL_POPS_ARGS (* args_so_far);
+ n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif
/* Ensure address is valid. SYMBOL_REF is already valid, so no need,
int n_named_args ATTRIBUTE_UNUSED,
tree exp, tree struct_value_addr_value,
tree fndecl, tree fntype,
- CUMULATIVE_ARGS *args_so_far,
+ cumulative_args_t args_so_far,
int reg_parm_stack_space,
rtx *old_stack_level, int *old_pending_adj,
int *must_preallocate, int *ecf_flags,
bool *may_tailcall, bool call_from_thunk_p)
{
+ CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
location_t loc = EXPR_LOCATION (exp);
/* 1 if scanning parms front to back, -1 if scanning back to front. */
int inc;
with those made by function.c. */
/* See if this argument should be passed by invisible reference. */
- if (pass_by_reference (args_so_far, TYPE_MODE (type),
+ if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
type, argpos < n_named_args))
{
bool callee_copies;
tree base;
callee_copies
- = reference_callee_copied (args_so_far, TYPE_MODE (type),
+ = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
type, argpos < n_named_args);
/* If we're compiling a thunk, pass through invisible references
/* Size of arguments before any adjustments (such as rounding). */
int unadjusted_args_size;
/* Data on reg parms scanned so far. */
- CUMULATIVE_ARGS args_so_far;
+ CUMULATIVE_ARGS args_so_far_v;
+ cumulative_args_t args_so_far;
/* Nonzero if a reg parm has been scanned. */
int reg_parm_seen;
/* Nonzero if this is an indirect function call. */
calling convention than normal calls. The fourth argument in
INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
or not. */
- INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
+ INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
+ args_so_far = pack_cumulative_args (&args_so_far_v);
/* Now possibly adjust the number of named args.
Normally, don't include the last named arg if anonymous args follow.
registers, so we must force them into memory. */
if (type_arg_types != 0
- && targetm.calls.strict_argument_naming (&args_so_far))
+ && targetm.calls.strict_argument_naming (args_so_far))
;
else if (type_arg_types != 0
- && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
+ && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
/* Don't include the last named arg. */
--n_named_args;
else
initialize_argument_information (num_actuals, args, &args_size,
n_named_args, exp,
structure_value_addr_value, fndecl, fntype,
- &args_so_far, reg_parm_stack_space,
+ args_so_far, reg_parm_stack_space,
&old_stack_level, &old_pending_adj,
&must_preallocate, &flags,
&try_tail_call, CALL_FROM_THUNK_P (exp));
/* Set up next argument register. For sibling calls on machines
with register windows this should be the incoming register. */
if (pass == 0)
- next_arg_reg = targetm.calls.function_incoming_arg (&args_so_far,
+ next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
VOIDmode,
void_type_node,
true);
else
- next_arg_reg = targetm.calls.function_arg (&args_so_far,
+ next_arg_reg = targetm.calls.function_arg (args_so_far,
VOIDmode, void_type_node,
true);
emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
adjusted_args_size.constant, struct_value_size,
next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
- flags, & args_so_far);
+ flags, args_so_far);
/* If the call setup or the call itself overlaps with anything
of the argument setup we probably clobbered our call address.
int inc;
int count;
rtx argblock = 0;
- CUMULATIVE_ARGS args_so_far;
+ CUMULATIVE_ARGS args_so_far_v;
+ cumulative_args_t args_so_far;
struct arg
{
rtx value;
memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
- INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
+ INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
#else
- INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
+ INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
#endif
+ args_so_far = pack_cumulative_args (&args_so_far_v);
args_size.constant = 0;
args_size.var = 0;
argvec[count].mode = Pmode;
argvec[count].partial = 0;
- argvec[count].reg = targetm.calls.function_arg (&args_so_far,
+ argvec[count].reg = targetm.calls.function_arg (args_so_far,
Pmode, NULL_TREE, true);
- gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
+ gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
NULL_TREE, 1) == 0);
locate_and_pad_parm (Pmode, NULL_TREE,
|| reg_parm_stack_space > 0)
args_size.constant += argvec[count].locate.size.constant;
- targetm.calls.function_arg_advance (&args_so_far, Pmode, (tree) 0, true);
+ targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
count++;
}
&& !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
val = force_operand (val, NULL_RTX);
- if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
+ if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
{
rtx slot;
int must_copy
- = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
+ = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
/* If this was a CONST function, it is now PURE since it now
reads memory. */
mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
argvec[count].mode = mode;
argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
- argvec[count].reg = targetm.calls.function_arg (&args_so_far, mode,
+ argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
NULL_TREE, true);
argvec[count].partial
- = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
+ = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
locate_and_pad_parm (mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
|| reg_parm_stack_space > 0)
args_size.constant += argvec[count].locate.size.constant;
- targetm.calls.function_arg_advance (&args_so_far, mode, (tree) 0, true);
+ targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
}
/* If this machine requires an external definition for library
build_function_type (tfom, NULL_TREE),
original_args_size.constant, args_size.constant,
struct_value_size,
- targetm.calls.function_arg (&args_so_far,
+ targetm.calls.function_arg (args_so_far,
VOIDmode, void_type_node, true),
valreg,
- old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
+ old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
/* For calls to `setjmp', etc., inform function.c:setjmp_warnings
that it should complain if nonvolatile values are live. For
and the rest are pushed. */
static rtx
-alpha_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+alpha_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int basereg;
int num_args;
(TYPE is null for libcalls where that information may not be available.) */
static void
-alpha_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+alpha_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
bool onstack = targetm.calls.must_pass_in_stack (mode, type);
int increment = onstack ? 6 : ALPHA_ARG_SIZE (mode, type, named);
}
static int
-alpha_arg_partial_bytes (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
+alpha_arg_partial_bytes (cumulative_args_t cum_v,
enum machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
int words = 0;
+ CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED = get_cumulative_args (cum_v);
#if TARGET_ABI_OPEN_VMS
if (cum->num_args < 6
/* Return true if TYPE should be passed by invisible reference. */
static bool
-alpha_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
+alpha_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
enum machine_mode mode,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
variable number of arguments. */
static void
-alpha_setup_incoming_varargs (CUMULATIVE_ARGS *pcum, enum machine_mode mode,
+alpha_setup_incoming_varargs (cumulative_args_t pcum, enum machine_mode mode,
tree type, int *pretend_size, int no_rtl)
{
- CUMULATIVE_ARGS cum = *pcum;
+ CUMULATIVE_ARGS cum = *get_cumulative_args (pcum);
/* Skip the current argument. */
- targetm.calls.function_arg_advance (&cum, mode, type, true);
+ targetm.calls.function_arg_advance (pack_cumulative_args (&cum), mode, type,
+ true);
#if TARGET_ABI_OPEN_VMS
/* For VMS, we allocate space for all 6 arg registers plus a count.
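[Note: every back-end hunk from here on applies the same mechanical recipe as the alpha changes above: the hook's CUMULATIVE_ARGS * parameter becomes cumulative_args_t, and the old pointer is recovered with get_cumulative_args at the top of the body. In outline (foo is a placeholder name, not a real target; the trivial body is only there to keep the sketch self-contained):

static rtx
foo_function_arg (cumulative_args_t cum_v,
		  enum machine_mode mode ATTRIBUTE_UNUSED,
		  const_tree type ATTRIBUTE_UNUSED,
		  bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED = get_cumulative_args (cum_v);

  /* Body otherwise unchanged: keep operating on *cum as before.
     Returning NULL_RTX here is purely illustrative.  */
  return NULL_RTX;
}
]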
static tree arm_builtin_decl (unsigned, bool);
static void emit_constant_insn (rtx cond, rtx pattern);
static rtx emit_set_insn (rtx, rtx);
-static int arm_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
+static int arm_arg_partial_bytes (cumulative_args_t, enum machine_mode,
tree, bool);
-static rtx arm_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
+static rtx arm_function_arg (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static void arm_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
+static void arm_function_arg_advance (cumulative_args_t, enum machine_mode,
const_tree, bool);
static unsigned int arm_function_arg_boundary (enum machine_mode, const_tree);
static rtx aapcs_allocate_return_reg (enum machine_mode, const_tree,
static void arm_file_end (void);
static void arm_file_start (void);
-static void arm_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
+static void arm_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
tree, int *, int);
-static bool arm_pass_by_reference (CUMULATIVE_ARGS *,
+static bool arm_pass_by_reference (cumulative_args_t,
enum machine_mode, const_tree, bool);
static bool arm_promote_prototypes (const_tree);
static bool arm_default_short_enums (void);
indeed make it pass in the stack if necessary). */
static rtx
-arm_function_arg (CUMULATIVE_ARGS *pcum, enum machine_mode mode,
+arm_function_arg (cumulative_args_t pcum_v, enum machine_mode mode,
const_tree type, bool named)
{
+ CUMULATIVE_ARGS *pcum = get_cumulative_args (pcum_v);
int nregs;
/* Handle the special case quickly. Pick an arbitrary value for op2 of
}
static int
-arm_arg_partial_bytes (CUMULATIVE_ARGS *pcum, enum machine_mode mode,
+arm_arg_partial_bytes (cumulative_args_t pcum_v, enum machine_mode mode,
tree type, bool named)
{
+ CUMULATIVE_ARGS *pcum = get_cumulative_args (pcum_v);
int nregs = pcum->nregs;
if (pcum->pcs_variant <= ARM_PCS_AAPCS_LOCAL)
(TYPE is null for libcalls where that information may not be available.) */
static void
-arm_function_arg_advance (CUMULATIVE_ARGS *pcum, enum machine_mode mode,
+arm_function_arg_advance (cumulative_args_t pcum_v, enum machine_mode mode,
const_tree type, bool named)
{
+ CUMULATIVE_ARGS *pcum = get_cumulative_args (pcum_v);
+
if (pcum->pcs_variant <= ARM_PCS_AAPCS_LOCAL)
{
aapcs_layout_arg (pcum, mode, type, named);
extension to the ARM ABI. */
static bool
-arm_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
+arm_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
that way. */
static void
-arm_setup_incoming_varargs (CUMULATIVE_ARGS *pcum,
+arm_setup_incoming_varargs (cumulative_args_t pcum_v,
enum machine_mode mode,
tree type,
int *pretend_size,
int second_time ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *pcum = get_cumulative_args (pcum_v);
int nregs;
cfun->machine->uses_anonymous_args = 1;
static bool avr_frame_pointer_required_p (void);
static bool avr_can_eliminate (const int, const int);
static bool avr_class_likely_spilled_p (reg_class_t c);
-static rtx avr_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
+static rtx avr_function_arg (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static void avr_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
+static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
const_tree, bool);
static void avr_help (void);
static bool avr_function_ok_for_sibcall (tree, tree);
in a register, and which register. */
static rtx
-avr_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int bytes = avr_num_arg_regs (mode, type);
if (cum->nregs && bytes <= cum->nregs)
in the argument list. */
static void
-avr_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int bytes = avr_num_arg_regs (mode, type);
cum->nregs -= bytes;
- now, the vastart pointer can access all arguments from the stack. */
static void
-setup_incoming_varargs (CUMULATIVE_ARGS *cum,
+setup_incoming_varargs (cumulative_args_t cum,
enum machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED, int *pretend_size,
int no_rtl)
if they are in the first 3 words. We assume at least 1 named argument
exists, so we never generate [ARGP] = R0 here. */
- for (i = cum->words + 1; i < max_arg_registers; i++)
+ for (i = get_cumulative_args (cum)->words + 1; i < max_arg_registers; i++)
{
mem = gen_rtx_MEM (Pmode,
plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
(TYPE is null for libcalls where that information may not be available.) */
static void
-bfin_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+bfin_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int count, bytes, words;
bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
(otherwise it is an extra parameter matching an ellipsis). */
static rtx
-bfin_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+bfin_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int bytes
= (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
stack. */
static int
-bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+bfin_arg_partial_bytes (cumulative_args_t cum, enum machine_mode mode,
tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
int bytes
= (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
- int bytes_left = cum->nregs * UNITS_PER_WORD;
+ int bytes_left = get_cumulative_args (cum)->nregs * UNITS_PER_WORD;
if (bytes == -1)
return 0;
/* Variable sized types are passed by reference. */
static bool
-bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
+bfin_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
static rtx cris_struct_value_rtx (tree, int);
-static void cris_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
+static void cris_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
tree type, int *, int);
static int cris_initial_frame_pointer_offset (void);
static int cris_memory_move_cost (enum machine_mode, reg_class_t, bool);
static bool cris_rtx_costs (rtx, int, int, int *, bool);
static int cris_address_cost (rtx, bool);
-static bool cris_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
+static bool cris_pass_by_reference (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static int cris_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
+static int cris_arg_partial_bytes (cumulative_args_t, enum machine_mode,
tree, bool);
-static rtx cris_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
+static rtx cris_function_arg (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static rtx cris_function_incoming_arg (CUMULATIVE_ARGS *,
+static rtx cris_function_incoming_arg (cumulative_args_t,
enum machine_mode, const_tree, bool);
-static void cris_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
+static void cris_function_arg_advance (cumulative_args_t, enum machine_mode,
const_tree, bool);
static tree cris_md_asm_clobbers (tree, tree, tree);
/* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
static void
-cris_setup_incoming_varargs (CUMULATIVE_ARGS *ca,
+cris_setup_incoming_varargs (cumulative_args_t ca_v,
enum machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED,
int *pretend_arg_size,
int second_time)
{
+ CUMULATIVE_ARGS *ca = get_cumulative_args (ca_v);
+
if (ca->regs < CRIS_MAX_ARGS_IN_REGS)
{
int stdarg_regs = CRIS_MAX_ARGS_IN_REGS - ca->regs;
For cris, we pass <= 8 bytes by value, others by reference. */
static bool
-cris_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
+cris_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
enum machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
}
static int
-cris_arg_partial_bytes (CUMULATIVE_ARGS *ca, enum machine_mode mode,
+cris_arg_partial_bytes (cumulative_args_t ca, enum machine_mode mode,
tree type, bool named ATTRIBUTE_UNUSED)
{
- if (ca->regs == CRIS_MAX_ARGS_IN_REGS - 1
+ if (get_cumulative_args (ca)->regs == CRIS_MAX_ARGS_IN_REGS - 1
&& !targetm.calls.must_pass_in_stack (mode, type)
&& CRIS_FUNCTION_ARG_SIZE (mode, type) > 4
&& CRIS_FUNCTION_ARG_SIZE (mode, type) <= 8)
}
static rtx
-cris_function_arg_1 (const CUMULATIVE_ARGS *ca,
+cris_function_arg_1 (cumulative_args_t ca_v,
enum machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named, bool incoming)
{
+ const CUMULATIVE_ARGS *ca = get_cumulative_args (ca_v);
+
if ((!incoming || named) && ca->regs < CRIS_MAX_ARGS_IN_REGS)
return gen_rtx_REG (mode, CRIS_FIRST_ARG_REG + ca->regs);
else
The void_type_node is sent as a "closing" call. */
static rtx
-cris_function_arg (CUMULATIVE_ARGS *ca, enum machine_mode mode,
+cris_function_arg (cumulative_args_t ca, enum machine_mode mode,
const_tree type, bool named)
{
return cris_function_arg_1 (ca, mode, type, named, false);
void_type_node TYPE parameter. */
static rtx
-cris_function_incoming_arg (CUMULATIVE_ARGS *ca, enum machine_mode mode,
+cris_function_incoming_arg (cumulative_args_t ca, enum machine_mode mode,
const_tree type, bool named)
{
return cris_function_arg_1 (ca, mode, type, named, true);
/* Worker function for TARGET_FUNCTION_ARG_ADVANCE. */
static void
-cris_function_arg_advance (CUMULATIVE_ARGS *ca, enum machine_mode mode,
+cris_function_arg_advance (cumulative_args_t ca_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *ca = get_cumulative_args (ca_v);
+
ca->regs += (3 + CRIS_FUNCTION_ARG_SIZE (mode, type)) / 4;
}
/* Zero structure to initialize current_frame_info. */
static struct fr30_frame_info zero_frame_info;
-static void fr30_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
+static void fr30_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
tree, int *, int);
static bool fr30_must_pass_in_stack (enum machine_mode, const_tree);
-static int fr30_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
+static int fr30_arg_partial_bytes (cumulative_args_t, enum machine_mode,
tree, bool);
-static rtx fr30_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
+static rtx fr30_function_arg (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static void fr30_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
+static void fr30_function_arg_advance (cumulative_args_t, enum machine_mode,
const_tree, bool);
static bool fr30_frame_pointer_required (void);
static rtx fr30_function_value (const_tree, const_tree, bool);
ARG_REGS_USED_SO_FAR has *not* been updated for the last named argument
which has type TYPE and mode MODE, and we rely on this fact. */
void
-fr30_setup_incoming_varargs (CUMULATIVE_ARGS *arg_regs_used_so_far,
+fr30_setup_incoming_varargs (cumulative_args_t arg_regs_used_so_far_v,
enum machine_mode mode,
tree type ATTRIBUTE_UNUSED,
int *pretend_size,
int second_time ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *arg_regs_used_so_far
+ = get_cumulative_args (arg_regs_used_so_far_v);
int size;
/* All BLKmode values are passed by reference. */
/* ??? This run-time test as well as the code inside the if
statement is probably unnecessary. */
- if (targetm.calls.strict_argument_naming (arg_regs_used_so_far))
+ if (targetm.calls.strict_argument_naming (arg_regs_used_so_far_v))
/* If TARGET_STRICT_ARGUMENT_NAMING returns true, then the last named
arg must not be treated as an anonymous arg. */
+ /* ??? This is a pointer increment, which makes no sense. */
arg_regs_used_so_far += fr30_num_arg_regs (mode, type);
size = FR30_NUM_ARG_REGS - (* arg_regs_used_so_far);
parameters to the function. */
static int
-fr30_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+fr30_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
tree type, bool named)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
/* Unnamed arguments, i.e. those that are prototyped as ...
are always passed on the stack.
Also check here to see if all the argument registers are full. */
}
static rtx
-fr30_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+fr30_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
if (!named
|| fr30_must_pass_in_stack (mode, type)
|| *cum >= FR30_NUM_ARG_REGS)
the stack. The compiler knows how to track the amount of stack space used
for arguments without any special help. */
static void
-fr30_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+fr30_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
const_tree type, bool named)
{
- *cum += named * fr30_num_arg_regs (mode, type);
+ *get_cumulative_args (cum) += named * fr30_num_arg_regs (mode, type);
}
/*}}}*/
static bool frv_in_small_data_p (const_tree);
static void frv_asm_output_mi_thunk
(FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT, tree);
-static void frv_setup_incoming_varargs (CUMULATIVE_ARGS *,
+static void frv_setup_incoming_varargs (cumulative_args_t,
enum machine_mode,
tree, int *, int);
static rtx frv_expand_builtin_saveregs (void);
static bool frv_function_ok_for_sibcall (tree, tree);
static rtx frv_struct_value_rtx (tree, int);
static bool frv_must_pass_in_stack (enum machine_mode mode, const_tree type);
-static int frv_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
+static int frv_arg_partial_bytes (cumulative_args_t, enum machine_mode,
tree, bool);
-static rtx frv_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
+static rtx frv_function_arg (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static rtx frv_function_incoming_arg (CUMULATIVE_ARGS *, enum machine_mode,
+static rtx frv_function_incoming_arg (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static void frv_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
+static void frv_function_arg_advance (cumulative_args_t, enum machine_mode,
const_tree, bool);
static unsigned int frv_function_arg_boundary (enum machine_mode,
const_tree);
/* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
static void
-frv_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
+frv_setup_incoming_varargs (cumulative_args_t cum_v,
enum machine_mode mode,
tree type ATTRIBUTE_UNUSED,
int *pretend_size,
int second_time)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
if (TARGET_DEBUG_ARG)
fprintf (stderr,
"setup_vararg: words = %2d, mode = %4s, pretend_size = %d, second_time = %d\n",
}
static rtx
-frv_function_arg_1 (const CUMULATIVE_ARGS *cum, enum machine_mode mode,
+frv_function_arg_1 (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type ATTRIBUTE_UNUSED, bool named,
bool incoming ATTRIBUTE_UNUSED)
{
+ const CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
enum machine_mode xmode = (mode == BLKmode) ? SImode : mode;
int arg_num = *cum;
rtx ret;
}
static rtx
-frv_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+frv_function_arg (cumulative_args_t cum, enum machine_mode mode,
const_tree type, bool named)
{
return frv_function_arg_1 (cum, mode, type, named, false);
}
static rtx
-frv_function_incoming_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+frv_function_incoming_arg (cumulative_args_t cum, enum machine_mode mode,
const_tree type, bool named)
{
return frv_function_arg_1 (cum, mode, type, named, true);
for arguments without any special help. */
static void
-frv_function_arg_advance (CUMULATIVE_ARGS *cum,
+frv_function_arg_advance (cumulative_args_t cum_v,
enum machine_mode mode,
const_tree type ATTRIBUTE_UNUSED,
bool named)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
enum machine_mode xmode = (mode == BLKmode) ? SImode : mode;
int bytes = GET_MODE_SIZE (xmode);
int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
the called function. */
static int
-frv_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+frv_arg_partial_bytes (cumulative_args_t cum, enum machine_mode mode,
tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
+
enum machine_mode xmode = (mode == BLKmode) ? SImode : mode;
int bytes = GET_MODE_SIZE (xmode);
int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
- int arg_num = *cum;
+ int arg_num = *get_cumulative_args (cum);
int ret;
ret = ((arg_num <= LAST_ARG_REGNUM && arg_num + words > LAST_ARG_REGNUM+1)
case the first 3 arguments are passed in registers. */
static rtx
-h8300_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+h8300_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
static const char *const hand_list[] = {
"__main",
"__cmpsi2",
(TYPE is null for libcalls where that information may not be available.) */
static void
-h8300_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+h8300_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
cum->nbytes += (mode != BLKmode
? (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) & -UNITS_PER_WORD
: (int_size_in_bytes (type) + UNITS_PER_WORD - 1) & -UNITS_PER_WORD);
may not be available.) */
static void
-ix86_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+ix86_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
HOST_WIDE_INT bytes, words;
if (mode == BLKmode)
ellipsis). */
static rtx
-ix86_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode omode,
+ix86_function_arg (cumulative_args_t cum_v, enum machine_mode omode,
const_tree type, bool named)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
enum machine_mode mode = omode;
HOST_WIDE_INT bytes, words;
rtx arg;
appropriate for passing a pointer to that type. */
static bool
-ix86_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
+ix86_pass_by_reference (cumulative_args_t cum_v ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
/* See Windows x64 Software Convention. */
if (TARGET_64BIT && (cum ? cum->call_abi : ix86_abi) == MS_ABI)
{
}
static void
-ix86_setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+ix86_setup_incoming_varargs (cumulative_args_t cum_v, enum machine_mode mode,
tree type, int *pretend_size ATTRIBUTE_UNUSED,
int no_rtl)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
CUMULATIVE_ARGS next_cum;
tree fntype;
For stdargs, we do want to skip the last named argument. */
next_cum = *cum;
if (stdarg_p (fntype))
- ix86_function_arg_advance (&next_cum, mode, type, true);
+ ix86_function_arg_advance (pack_cumulative_args (&next_cum), mode, type,
+ true);
if (cum->call_abi == MS_ABI)
setup_incoming_varargs_ms_64 (&next_cum);
static void ia64_option_default_params (void);
static bool ia64_can_eliminate (const int, const int);
static enum machine_mode hfa_element_mode (const_tree, bool);
-static void ia64_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
+static void ia64_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
tree, int *, int);
-static int ia64_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
+static int ia64_arg_partial_bytes (cumulative_args_t, enum machine_mode,
tree, bool);
-static rtx ia64_function_arg_1 (const CUMULATIVE_ARGS *, enum machine_mode,
+static rtx ia64_function_arg_1 (cumulative_args_t, enum machine_mode,
const_tree, bool, bool);
-static rtx ia64_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
+static rtx ia64_function_arg (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static rtx ia64_function_incoming_arg (CUMULATIVE_ARGS *,
+static rtx ia64_function_incoming_arg (cumulative_args_t,
enum machine_mode, const_tree, bool);
-static void ia64_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
+static void ia64_function_arg_advance (cumulative_args_t, enum machine_mode,
const_tree, bool);
static unsigned int ia64_function_arg_boundary (enum machine_mode,
const_tree);
We generate the actual spill instructions during prologue generation. */
static void
-ia64_setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+ia64_setup_incoming_varargs (cumulative_args_t cum, enum machine_mode mode,
tree type, int * pretend_size,
int second_time ATTRIBUTE_UNUSED)
{
- CUMULATIVE_ARGS next_cum = *cum;
+ CUMULATIVE_ARGS next_cum = *get_cumulative_args (cum);
/* Skip the current argument. */
- ia64_function_arg_advance (&next_cum, mode, type, 1);
+ ia64_function_arg_advance (pack_cumulative_args (&next_cum), mode, type, 1);
if (next_cum.words < MAX_ARGUMENT_SLOTS)
{
registers. */
static rtx
-ia64_function_arg_1 (const CUMULATIVE_ARGS *cum, enum machine_mode mode,
+ia64_function_arg_1 (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named, bool incoming)
{
+ const CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
int basereg = (incoming ? GR_ARG_FIRST : AR_ARG_FIRST);
int words = ia64_function_arg_words (type, mode);
int offset = ia64_function_arg_offset (cum, type, words);
/* Implement TARGET_FUNCTION_ARG target hook. */
static rtx
-ia64_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+ia64_function_arg (cumulative_args_t cum, enum machine_mode mode,
const_tree type, bool named)
{
return ia64_function_arg_1 (cum, mode, type, named, false);
/* Implement TARGET_FUNCTION_INCOMING_ARG target hook. */
static rtx
-ia64_function_incoming_arg (CUMULATIVE_ARGS *cum,
+ia64_function_incoming_arg (cumulative_args_t cum,
enum machine_mode mode,
const_tree type, bool named)
{
in memory. */
static int
-ia64_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+ia64_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
int words = ia64_function_arg_words (type, mode);
int offset = ia64_function_arg_offset (cum, type, words);
ia64_function_arg. */
static void
-ia64_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+ia64_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int words = ia64_function_arg_words (type, mode);
int offset = ia64_function_arg_offset (cum, type, words);
enum machine_mode hfa_mode = VOIDmode;
static void iq2000_init_builtins (void);
static rtx iq2000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static bool iq2000_return_in_memory (const_tree, const_tree);
-static void iq2000_setup_incoming_varargs (CUMULATIVE_ARGS *,
+static void iq2000_setup_incoming_varargs (cumulative_args_t,
enum machine_mode, tree, int *,
int);
static bool iq2000_rtx_costs (rtx, int, int, int *, bool);
static int iq2000_address_cost (rtx, bool);
static section *iq2000_select_section (tree, int, unsigned HOST_WIDE_INT);
static rtx iq2000_legitimize_address (rtx, rtx, enum machine_mode);
-static bool iq2000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
+static bool iq2000_pass_by_reference (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static int iq2000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
+static int iq2000_arg_partial_bytes (cumulative_args_t, enum machine_mode,
tree, bool);
-static rtx iq2000_function_arg (CUMULATIVE_ARGS *,
+static rtx iq2000_function_arg (cumulative_args_t,
enum machine_mode, const_tree, bool);
-static void iq2000_function_arg_advance (CUMULATIVE_ARGS *,
+static void iq2000_function_arg_advance (cumulative_args_t,
enum machine_mode, const_tree, bool);
static unsigned int iq2000_function_arg_boundary (enum machine_mode,
const_tree);
position in CUM. */
static void
-iq2000_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+iq2000_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
if (TARGET_DEBUG_D_MODE)
{
fprintf (stderr,
and type TYPE in CUM, or 0 if the argument is to be passed on the stack. */
static rtx
-iq2000_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+iq2000_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
rtx ret;
int regbase = -1;
int bias = 0;
}
static int
-iq2000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+iq2000_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
if (mode == DImode && cum->arg_words == MAX_ARGS_IN_REGISTERS - 1)
{
if (TARGET_DEBUG_D_MODE)
int i;
tree next_arg;
tree cur_arg;
- CUMULATIVE_ARGS args_so_far;
+ CUMULATIVE_ARGS args_so_far_v;
+ cumulative_args_t args_so_far;
int store_args_on_stack = (iq2000_can_use_return_insn ());
/* If struct value address is treated as the first argument. */
variable arguments.
This is only needed if store_args_on_stack is true. */
- INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0, 0);
+ INIT_CUMULATIVE_ARGS (args_so_far_v, fntype, NULL_RTX, 0, 0);
+ args_so_far = pack_cumulative_args (&args_so_far_v);
regno = GP_ARG_FIRST;
for (cur_arg = fnargs; cur_arg != 0; cur_arg = next_arg)
passed_mode = Pmode;
}
- entry_parm = iq2000_function_arg (&args_so_far, passed_mode,
+ entry_parm = iq2000_function_arg (args_so_far, passed_mode,
passed_type, true);
- iq2000_function_arg_advance (&args_so_far, passed_mode,
+ iq2000_function_arg_advance (args_so_far, passed_mode,
passed_type, true);
next_arg = DECL_CHAIN (cur_arg);
iq2000_function_arg has encoded a PARALLEL rtx, holding a vector of
adjustments to be made as the next_arg_reg variable, so we split up
the insns, and emit them separately. */
- next_arg_reg = iq2000_function_arg (&args_so_far, VOIDmode,
+ next_arg_reg = iq2000_function_arg (args_so_far, VOIDmode,
void_type_node, true);
if (next_arg_reg != 0 && GET_CODE (next_arg_reg) == PARALLEL)
{
/* Return true when an argument must be passed by reference. */
static bool
-iq2000_pass_by_reference (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+iq2000_pass_by_reference (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int size;
/* We must pass by reference if we would be both passing in registers
CUMULATIVE_ARGS temp;
temp = *cum;
- if (iq2000_function_arg (&temp, mode, type, named) != 0)
+ if (iq2000_function_arg (pack_cumulative_args (&temp), mode, type, named)
+ != 0)
return 1;
}
/* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
static void
-iq2000_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
+iq2000_setup_incoming_varargs (cumulative_args_t cum_v,
enum machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED, int * pretend_size,
int no_rtl)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
unsigned int iq2000_off = ! cum->last_arg_fp;
unsigned int iq2000_fp_off = cum->last_arg_fp;
static void expand_save_restore (struct lm32_frame_info *info, int op);
static void stack_adjust (HOST_WIDE_INT amount);
static bool lm32_in_small_data_p (const_tree);
-static void lm32_setup_incoming_varargs (CUMULATIVE_ARGS * cum,
+static void lm32_setup_incoming_varargs (cumulative_args_t cum,
enum machine_mode mode, tree type,
int *pretend_size, int no_rtl);
static bool lm32_rtx_costs (rtx x, int code, int outer_code, int *total,
lm32_legitimate_address_p (enum machine_mode mode, rtx x, bool strict);
static HOST_WIDE_INT lm32_compute_frame_size (int size);
static void lm32_option_override (void);
-static rtx lm32_function_arg (CUMULATIVE_ARGS * cum,
+static rtx lm32_function_arg (cumulative_args_t cum,
enum machine_mode mode, const_tree type,
bool named);
-static void lm32_function_arg_advance (CUMULATIVE_ARGS * cum,
+static void lm32_function_arg_advance (cumulative_args_t cum,
enum machine_mode mode,
const_tree type, bool named);
static bool lm32_legitimate_constant_p (enum machine_mode, rtx);
(otherwise it is an extra parameter matching an ellipsis). */
static rtx
-lm32_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+lm32_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
if (mode == VOIDmode)
/* Compute operand 2 of the call insn. */
return GEN_INT (0);
}
static void
-lm32_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+lm32_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
- *cum += LM32_NUM_REGS2 (mode, type);
+ *get_cumulative_args (cum) += LM32_NUM_REGS2 (mode, type);
}
HOST_WIDE_INT
}
static void
-lm32_setup_incoming_varargs (CUMULATIVE_ARGS * cum, enum machine_mode mode,
+lm32_setup_incoming_varargs (cumulative_args_t cum_v, enum machine_mode mode,
tree type, int *pretend_size, int no_rtl)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int first_anon_arg;
tree fntype;
static void m32c_insert_attributes (tree, tree *);
static bool m32c_legitimate_address_p (enum machine_mode, rtx, bool);
static bool m32c_addr_space_legitimate_address_p (enum machine_mode, rtx, bool, addr_space_t);
-static rtx m32c_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
+static rtx m32c_function_arg (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static bool m32c_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
+static bool m32c_pass_by_reference (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static void m32c_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
+static void m32c_function_arg_advance (cumulative_args_t, enum machine_mode,
const_tree, bool);
static unsigned int m32c_function_arg_boundary (enum machine_mode, const_tree);
static int m32c_pushm_popm (Push_Pop_Type);
-static bool m32c_strict_argument_naming (CUMULATIVE_ARGS *);
+static bool m32c_strict_argument_naming (cumulative_args_t);
static rtx m32c_struct_value_rtx (tree, int);
static rtx m32c_subreg (enum machine_mode, rtx, enum machine_mode, int);
static int need_to_save (int);
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG m32c_function_arg
static rtx
-m32c_function_arg (CUMULATIVE_ARGS * ca,
+m32c_function_arg (cumulative_args_t ca_v,
enum machine_mode mode, const_tree type, bool named)
{
+ CUMULATIVE_ARGS *ca = get_cumulative_args (ca_v);
+
/* Can return a reg, parallel, or 0 for stack */
rtx rv = NULL_RTX;
#if DEBUG0
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE m32c_pass_by_reference
static bool
-m32c_pass_by_reference (CUMULATIVE_ARGS * ca ATTRIBUTE_UNUSED,
+m32c_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE m32c_function_arg_advance
static void
-m32c_function_arg_advance (CUMULATIVE_ARGS * ca,
+m32c_function_arg_advance (cumulative_args_t ca_v,
enum machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *ca = get_cumulative_args (ca_v);
+
if (ca->force_mem)
ca->force_mem = 0;
else
#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING m32c_strict_argument_naming
static bool
-m32c_strict_argument_naming (CUMULATIVE_ARGS * ca ATTRIBUTE_UNUSED)
+m32c_strict_argument_naming (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
return 1;
}
static rtx m32r_function_value (const_tree, const_tree, bool);
static rtx m32r_libcall_value (enum machine_mode, const_rtx);
static bool m32r_function_value_regno_p (const unsigned int);
-static void m32r_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
+static void m32r_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
tree, int *, int);
static void init_idents (void);
static bool m32r_rtx_costs (rtx, int, int, int *, bool speed);
static int m32r_memory_move_cost (enum machine_mode, reg_class_t, bool);
-static bool m32r_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
+static bool m32r_pass_by_reference (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static int m32r_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
+static int m32r_arg_partial_bytes (cumulative_args_t, enum machine_mode,
tree, bool);
-static rtx m32r_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
+static rtx m32r_function_arg (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static void m32r_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
+static void m32r_function_arg_advance (cumulative_args_t, enum machine_mode,
const_tree, bool);
static bool m32r_can_eliminate (const int, const int);
static void m32r_conditional_register_usage (void);
/* Return nonzero if TYPE must be passed by indirect reference. */
static bool
-m32r_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
+m32r_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
enum machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
\f
static int
-m32r_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+m32r_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
int words;
unsigned int size =
(((mode == BLKmode && type)
and the rest are pushed. */
static rtx
-m32r_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+m32r_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
return (PASS_IN_REG_P (*cum, mode, type)
? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type))
: NULL_RTX);
(TYPE is null for libcalls where that information may not be available.) */
static void
-m32r_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+m32r_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
*cum = (ROUND_ADVANCE_CUM (*cum, mode, type)
+ ROUND_ADVANCE_ARG (mode, type));
}
static bool
m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
{
- return m32r_pass_by_reference (NULL, TYPE_MODE (type), type, false);
+ cumulative_args_t dummy = pack_cumulative_args (NULL);
+
+ return m32r_pass_by_reference (dummy, TYPE_MODE (type), type, false);
}
/* Worker function for TARGET_FUNCTION_VALUE. */
and mode MODE, and we rely on this fact. */
static void
-m32r_setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+m32r_setup_incoming_varargs (cumulative_args_t cum, enum machine_mode mode,
tree type, int *pretend_size, int no_rtl)
{
int first_anon_arg;
/* All BLKmode values are passed by reference. */
gcc_assert (mode != BLKmode);
- first_anon_arg = (ROUND_ADVANCE_CUM (*cum, mode, type)
+ first_anon_arg = (ROUND_ADVANCE_CUM (*get_cumulative_args (cum), mode, type)
+ ROUND_ADVANCE_ARG (mode, type));
if (first_anon_arg < M32R_MAX_PARM_REGS)
static void m68k_trampoline_init (rtx, tree, rtx);
static int m68k_return_pops_args (tree, tree, int);
static rtx m68k_delegitimize_address (rtx);
-static void m68k_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
+static void m68k_function_arg_advance (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static rtx m68k_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
+static rtx m68k_function_arg (cumulative_args_t, enum machine_mode,
const_tree, bool);
static bool m68k_cannot_force_const_mem (enum machine_mode mode, rtx x);
\f
/* On the m68k all args are always pushed. */
static rtx
-m68k_function_arg (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
+m68k_function_arg (cumulative_args_t cum ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
}
static void
-m68k_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+m68k_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
*cum += (mode != BLKmode
? (GET_MODE_SIZE (mode) + 3) & ~3
: (int_size_in_bytes (type) + 3) & ~3);
static int try_constant_tricks (long, HOST_WIDE_INT *, HOST_WIDE_INT *);
static const char * output_inline_const (enum machine_mode, rtx *);
static void layout_mcore_frame (struct mcore_frame *);
-static void mcore_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode, tree, int *, int);
+static void mcore_setup_incoming_varargs (cumulative_args_t, enum machine_mode, tree, int *, int);
static cond_type is_cond_candidate (rtx);
static rtx emit_new_cond_insn (rtx, int);
static rtx conditionalize_block (rtx);
static bool mcore_rtx_costs (rtx, int, int, int *, bool);
static void mcore_external_libcall (rtx);
static bool mcore_return_in_memory (const_tree, const_tree);
-static int mcore_arg_partial_bytes (CUMULATIVE_ARGS *,
+static int mcore_arg_partial_bytes (cumulative_args_t,
enum machine_mode,
tree, bool);
-static rtx mcore_function_arg (CUMULATIVE_ARGS *,
+static rtx mcore_function_arg (cumulative_args_t,
enum machine_mode,
const_tree, bool);
-static void mcore_function_arg_advance (CUMULATIVE_ARGS *,
+static void mcore_function_arg_advance (cumulative_args_t,
enum machine_mode,
const_tree, bool);
static unsigned int mcore_function_arg_boundary (enum machine_mode,
/* Keep track of some information about varargs for the prolog. */
static void
-mcore_setup_incoming_varargs (CUMULATIVE_ARGS *args_so_far,
+mcore_setup_incoming_varargs (cumulative_args_t args_so_far_v,
enum machine_mode mode, tree type,
int * ptr_pretend_size ATTRIBUTE_UNUSED,
int second_time ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *args_so_far = get_cumulative_args (args_so_far_v);
+
current_function_anonymous_args = 1;
/* We need to know how many argument registers are used before
its data type forbids. */
static rtx
-mcore_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+mcore_function_arg (cumulative_args_t cum, enum machine_mode mode,
const_tree type, bool named)
{
int arg_reg;
if (targetm.calls.must_pass_in_stack (mode, type))
return 0;
- arg_reg = ROUND_REG (*cum, mode);
+ arg_reg = ROUND_REG (*get_cumulative_args (cum), mode);
if (arg_reg < NPARM_REGS)
return handle_structs_in_regs (mode, type, FIRST_PARM_REG + arg_reg);
}
static void
-mcore_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+mcore_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
*cum = (ROUND_REG (*cum, mode)
+ (int)named * mcore_num_arg_regs (mode, type));
}
the function. */
static int
-mcore_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+mcore_arg_partial_bytes (cumulative_args_t cum, enum machine_mode mode,
tree type, bool named)
{
- int reg = ROUND_REG (*cum, mode);
+ int reg = ROUND_REG (*get_cumulative_args (cum), mode);
if (named == 0)
return 0;
static void mep_bundle_insns (rtx);
static bool mep_rtx_cost (rtx, int, int, int *, bool);
static int mep_address_cost (rtx, bool);
-static void mep_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
+static void mep_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
tree, int *, int);
-static bool mep_pass_by_reference (CUMULATIVE_ARGS * cum, enum machine_mode,
+static bool mep_pass_by_reference (cumulative_args_t cum, enum machine_mode,
const_tree, bool);
-static rtx mep_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
+static rtx mep_function_arg (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static void mep_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
+static void mep_function_arg_advance (cumulative_args_t, enum machine_mode,
const_tree, bool);
static bool mep_vector_mode_supported_p (enum machine_mode);
static rtx mep_allocate_initial_value (rtx);
/* Function args in registers. */
static void
-mep_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
+mep_setup_incoming_varargs (cumulative_args_t cum,
enum machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED, int *pretend_size,
int second_time ATTRIBUTE_UNUSED)
{
- int nsave = 4 - (cum->nregs + 1);
+ int nsave = 4 - (get_cumulative_args (cum)->nregs + 1);
if (nsave > 0)
cfun->machine->arg_regs_to_save = nsave;
first arg. For varargs, we copy $1..$4 to the stack. */
static rtx
-mep_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+mep_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
/* VOIDmode is a signal for the backend to pass data to the call
expander via the second operand to the call pattern. We use
this to determine whether to use "jsr" or "jsrv". */
}
static bool
-mep_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
+mep_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
enum machine_mode mode,
const_tree type,
bool named ATTRIBUTE_UNUSED)
return true;
if (size <= 4)
return false;
- if (TARGET_IVC2 && cum->nregs < 4 && type != NULL_TREE && VECTOR_TYPE_P (type))
+ if (TARGET_IVC2 && get_cumulative_args (cum)->nregs < 4
+ && type != NULL_TREE && VECTOR_TYPE_P (type))
return false;
return true;
}
static void
-mep_function_arg_advance (CUMULATIVE_ARGS *pcum,
+mep_function_arg_advance (cumulative_args_t pcum,
enum machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
- pcum->nregs += 1;
+ get_cumulative_args (pcum)->nregs += 1;
}
bool
/* Advance the argument to the next argument position. */
static void
-microblaze_function_arg_advance (CUMULATIVE_ARGS * cum, enum machine_mode mode,
+microblaze_function_arg_advance (cumulative_args_t cum_v,
+ enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
cum->arg_number++;
switch (mode)
{
or 0 if the argument is to be passed on the stack. */
static rtx
-microblaze_function_arg (CUMULATIVE_ARGS * cum, enum machine_mode mode,
+microblaze_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
rtx ret;
int regbase = -1;
int *arg_words = &cum->arg_words;
/* Return number of bytes of argument to put in registers. */
static int
-function_arg_partial_bytes (CUMULATIVE_ARGS * cum, enum machine_mode mode,
+function_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
if ((mode == BLKmode
|| GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
|| GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
int i;
tree next_arg;
tree cur_arg;
- CUMULATIVE_ARGS args_so_far;
+ CUMULATIVE_ARGS args_so_far_v;
+ cumulative_args_t args_so_far;
rtx mem_rtx, reg_rtx;
/* If struct value address is treated as the first argument, make it so. */
/* Determine the last argument, and get its name. */
- INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0, 0);
+ INIT_CUMULATIVE_ARGS (args_so_far_v, fntype, NULL_RTX, 0, 0);
+ args_so_far = pack_cumulative_args (&args_so_far_v);
regno = GP_ARG_FIRST;
for (cur_arg = fnargs; cur_arg != 0; cur_arg = next_arg)
passed_mode = Pmode;
}
- entry_parm = targetm.calls.function_arg (&args_so_far, passed_mode,
+ entry_parm = targetm.calls.function_arg (args_so_far, passed_mode,
passed_type, true);
if (entry_parm)
break;
}
- targetm.calls.function_arg_advance (&args_so_far, passed_mode,
+ targetm.calls.function_arg_advance (args_so_far, passed_mode,
passed_type, true);
next_arg = TREE_CHAIN (cur_arg);
/* Split parallel insn into a sequence of insns. */
- next_arg_reg = targetm.calls.function_arg (&args_so_far, VOIDmode,
+ next_arg_reg = targetm.calls.function_arg (args_so_far, VOIDmode,
void_type_node, true);
if (next_arg_reg != 0 && GET_CODE (next_arg_reg) == PARALLEL)
{
/* Implement TARGET_STRICT_ARGUMENT_NAMING. */
static bool
-mips_strict_argument_naming (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
+mips_strict_argument_naming (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
return !TARGET_OLDABI;
}
/* Implement TARGET_FUNCTION_ARG. */
static rtx
-mips_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+mips_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
struct mips_arg_info info;
/* We will be called with a mode of VOIDmode after the last argument
/* Implement TARGET_FUNCTION_ARG_ADVANCE. */
static void
-mips_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+mips_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
struct mips_arg_info info;
mips_get_arg_info (&info, cum, mode, type, named);
/* Implement TARGET_ARG_PARTIAL_BYTES. */
static int
-mips_arg_partial_bytes (CUMULATIVE_ARGS *cum,
+mips_arg_partial_bytes (cumulative_args_t cum,
enum machine_mode mode, tree type, bool named)
{
struct mips_arg_info info;
- mips_get_arg_info (&info, cum, mode, type, named);
+ mips_get_arg_info (&info, get_cumulative_args (cum), mode, type, named);
return info.stack_words > 0 ? info.reg_words * UNITS_PER_WORD : 0;
}
/* Return nonzero when an argument must be passed by reference. */
static bool
-mips_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
+mips_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
enum machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
/* Implement TARGET_CALLEE_COPIES. */
static bool
-mips_callee_copies (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
+mips_callee_copies (cumulative_args_t cum ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED, bool named)
{
/* Implement TARGET_SETUP_INCOMING_VARARGS. */
static void
-mips_setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+mips_setup_incoming_varargs (cumulative_args_t cum, enum machine_mode mode,
tree type, int *pretend_size ATTRIBUTE_UNUSED,
int no_rtl)
{
/* The caller has advanced CUM up to, but not beyond, the last named
argument. Advance a local copy of CUM past the last "real" named
argument, to find out how many registers are left over. */
- local_cum = *cum;
- mips_function_arg_advance (&local_cum, mode, type, true);
+ local_cum = *get_cumulative_args (cum);
+ mips_function_arg_advance (pack_cumulative_args (&local_cum), mode, type,
+ true);
/* Find out how many registers we need to save. */
gp_saved = MAX_ARGS_IN_REGISTERS - local_cum.num_gprs;
else
mips_output_64bit_xfer (direction, gparg, fparg);
- mips_function_arg_advance (&cum, mode, NULL, true);
+ mips_function_arg_advance (pack_cumulative_args (&cum), mode, NULL, true);
}
}
/* Definitions of target machine for GNU compiler, for MMIX.
Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
- 2010
+ 2010, 2011
Free Software Foundation, Inc.
Contributed by Hans-Peter Nilsson (hp@bitrange.com)
static void mmix_asm_output_mi_thunk
(FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT, tree);
static void mmix_setup_incoming_varargs
- (CUMULATIVE_ARGS *, enum machine_mode, tree, int *, int);
+ (cumulative_args_t, enum machine_mode, tree, int *, int);
static void mmix_file_start (void);
static void mmix_file_end (void);
static bool mmix_rtx_costs (rtx, int, int, int *, bool);
static enum machine_mode mmix_promote_function_mode (const_tree,
enum machine_mode,
int *, const_tree, int);
-static void mmix_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
+static void mmix_function_arg_advance (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static rtx mmix_function_arg_1 (const CUMULATIVE_ARGS *, enum machine_mode,
+static rtx mmix_function_arg_1 (const cumulative_args_t, enum machine_mode,
const_tree, bool, bool);
-static rtx mmix_function_incoming_arg (CUMULATIVE_ARGS *, enum machine_mode,
+static rtx mmix_function_incoming_arg (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static rtx mmix_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
+static rtx mmix_function_arg (cumulative_args_t, enum machine_mode,
const_tree, bool);
static rtx mmix_function_value (const_tree, const_tree, bool);
static rtx mmix_libcall_value (enum machine_mode, const_rtx);
static bool mmix_function_value_regno_p (const unsigned int);
-static bool mmix_pass_by_reference (CUMULATIVE_ARGS *,
+static bool mmix_pass_by_reference (cumulative_args_t,
enum machine_mode, const_tree, bool);
static bool mmix_frame_pointer_required (void);
static void mmix_asm_trampoline_template (FILE *);
}
static void
-mmix_function_arg_advance (CUMULATIVE_ARGS *argsp, enum machine_mode mode,
+mmix_function_arg_advance (cumulative_args_t argsp_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *argsp = get_cumulative_args (argsp_v);
int arg_size = MMIX_FUNCTION_ARG_SIZE (mode, type);
argsp->regs = ((targetm.calls.must_pass_in_stack (mode, type)
/* Helper function for mmix_function_arg and mmix_function_incoming_arg. */
static rtx
-mmix_function_arg_1 (const CUMULATIVE_ARGS *argsp,
+mmix_function_arg_1 (const cumulative_args_t argsp_v,
enum machine_mode mode,
const_tree type,
bool named ATTRIBUTE_UNUSED,
bool incoming)
{
+ CUMULATIVE_ARGS *argsp = get_cumulative_args (argsp_v);
+
/* Last-argument marker. */
if (type == void_type_node)
return (argsp->regs < MMIX_MAX_ARGS_IN_REGS)
one that must go on stack. */
static rtx
-mmix_function_arg (CUMULATIVE_ARGS *argsp,
+mmix_function_arg (cumulative_args_t argsp,
enum machine_mode mode,
const_tree type,
bool named)
}
static rtx
-mmix_function_incoming_arg (CUMULATIVE_ARGS *argsp,
+mmix_function_incoming_arg (cumulative_args_t argsp,
enum machine_mode mode,
const_tree type,
bool named)
everything that goes by value. */
static bool
-mmix_pass_by_reference (CUMULATIVE_ARGS *argsp, enum machine_mode mode,
+mmix_pass_by_reference (cumulative_args_t argsp_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *argsp = get_cumulative_args (argsp_v);
+
/* FIXME: Check: I'm not sure the must_pass_in_stack check is
necessary. */
if (targetm.calls.must_pass_in_stack (mode, type))
can parse all arguments in registers, to improve performance. */
static void
-mmix_setup_incoming_varargs (CUMULATIVE_ARGS *args_so_farp,
+mmix_setup_incoming_varargs (cumulative_args_t args_so_farp_v,
enum machine_mode mode,
tree vartype,
int *pretend_sizep,
int second_time ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *args_so_farp = get_cumulative_args (args_so_farp_v);
+
/* The last named variable has been handled, but
args_so_farp has not been advanced for it. */
if (args_so_farp->regs + 1 < MMIX_MAX_ARGS_IN_REGS)
/* Return true when a parameter should be passed by reference. */
static bool
-mn10300_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
+mn10300_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
enum machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
from a function. If the result is NULL_RTX, the argument is pushed. */
static rtx
-mn10300_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+mn10300_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
rtx result = NULL_RTX;
int size;
(TYPE is null for libcalls where that information may not be available.) */
static void
-mn10300_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+mn10300_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
cum->nbytes += (mode != BLKmode
? (GET_MODE_SIZE (mode) + 3) & ~3
: (int_size_in_bytes (type) + 3) & ~3);
partially in registers and partially in memory. */
static int
-mn10300_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+mn10300_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int size;
/* We only support using 2 data registers as argument registers. */
/* Target Code for moxie
- Copyright (C) 2008, 2009, 2010 Free Software Foundation
+ Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation
Contributed by Anthony Green.
This file is part of GCC.
/* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
static void
-moxie_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
+moxie_setup_incoming_varargs (cumulative_args_t cum_v,
enum machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED,
int *pretend_size, int no_rtl)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int regno;
int regs = 8 - *cum;
NULL_RTX if there's no more space. */
static rtx
-moxie_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+moxie_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
if (*cum < 8)
return gen_rtx_REG (mode, *cum);
else
: (unsigned) int_size_in_bytes (TYPE))
static void
-moxie_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+moxie_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
*cum = (*cum < MOXIE_R6
? *cum + ((3 + MOXIE_FUNCTION_ARG_SIZE (mode, type)) / 4)
: *cum);
passed by reference. */
static bool
-moxie_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
+moxie_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
enum machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
that fit in argument passing registers. */
static int
-moxie_arg_partial_bytes (CUMULATIVE_ARGS *cum,
+moxie_arg_partial_bytes (cumulative_args_t cum_v,
enum machine_mode mode,
tree type, bool named)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int bytes_left, size;
if (*cum >= 8)
return 0;
- if (moxie_pass_by_reference (cum, mode, type, named))
+ if (moxie_pass_by_reference (cum_v, mode, type, named))
size = 4;
else if (type)
{
static void pa_hpux_init_libfuncs (void);
#endif
static rtx pa_struct_value_rtx (tree, int);
-static bool pa_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
+static bool pa_pass_by_reference (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static int pa_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
+static int pa_arg_partial_bytes (cumulative_args_t, enum machine_mode,
tree, bool);
-static void pa_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
+static void pa_function_arg_advance (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static rtx pa_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
+static rtx pa_function_arg (cumulative_args_t, enum machine_mode,
const_tree, bool);
static unsigned int pa_function_arg_boundary (enum machine_mode, const_tree);
static struct machine_function * pa_init_machine_status (void);
or updates the ABI. */
static bool
-pa_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
+pa_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
enum machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
(TYPE is null for libcalls where that information may not be available.) */
static void
-pa_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+pa_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int arg_size = FUNCTION_ARG_SIZE (mode, type);
cum->nargs_prototype--;
??? We might want to restructure this so that it looks more like other
ports. */
static rtx
-pa_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+pa_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int max_arg_words = (TARGET_64BIT ? 8 : 4);
int alignment = 0;
int arg_size;
then this routine should return zero. */
static int
-pa_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+pa_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
unsigned int max_arg_words = 8;
unsigned int offset = 0;
static rtx pdp11_libcall_value (enum machine_mode, const_rtx);
static bool pdp11_function_value_regno_p (const unsigned int);
static void pdp11_trampoline_init (rtx, tree, rtx);
-static rtx pdp11_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
+static rtx pdp11_function_arg (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static void pdp11_function_arg_advance (CUMULATIVE_ARGS *,
+static void pdp11_function_arg_advance (cumulative_args_t,
enum machine_mode, const_tree, bool);
static void pdp11_conditional_register_usage (void);
static bool pdp11_legitimate_constant_p (enum machine_mode, rtx);
(otherwise it is an extra parameter matching an ellipsis). */
static rtx
-pdp11_function_arg (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
+pdp11_function_arg (cumulative_args_t cum ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
may not be available.) */
static void
-pdp11_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+pdp11_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
*cum += (mode != BLKmode
? GET_MODE_SIZE (mode)
: int_size_in_bytes (type));
void picochip_init_libfuncs (void);
void picochip_reorg (void);
-int picochip_arg_partial_bytes (CUMULATIVE_ARGS * p_cum,
+int picochip_arg_partial_bytes (cumulative_args_t p_cum,
enum machine_mode mode,
tree type, bool named);
-rtx picochip_function_arg (CUMULATIVE_ARGS * p_cum,
+rtx picochip_function_arg (cumulative_args_t p_cum,
enum machine_mode mode,
const_tree type, bool named);
-rtx picochip_incoming_function_arg (CUMULATIVE_ARGS * p_cum,
+rtx picochip_incoming_function_arg (cumulative_args_t p_cum,
enum machine_mode mode,
const_tree type, bool named);
-void picochip_arg_advance (CUMULATIVE_ARGS * p_cum, enum machine_mode mode,
+void picochip_arg_advance (cumulative_args_t p_cum, enum machine_mode mode,
const_tree type, bool named);
unsigned int picochip_function_arg_boundary (enum machine_mode mode,
const_tree type);
/* Determine where the next outgoing arg should be placed. */
rtx
-picochip_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+picochip_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int reg = 0;
int type_align_in_units = 0;
int type_size_in_units;
passed in registers, which are then pushed onto the stack by the
function prologue). */
rtx
-picochip_incoming_function_arg (CUMULATIVE_ARGS *cum,
+picochip_incoming_function_arg (cumulative_args_t cum,
enum machine_mode mode,
const_tree type, bool named)
{
/* Compute partial registers. */
int
-picochip_arg_partial_bytes (CUMULATIVE_ARGS * p_cum, enum machine_mode mode,
+picochip_arg_partial_bytes (cumulative_args_t p_cum, enum machine_mode mode,
tree type, bool named ATTRIBUTE_UNUSED)
{
int type_align_in_units = 0;
int new_offset = 0;
int offset_overflow = 0;
- unsigned cum = *((unsigned *) p_cum);
+ unsigned cum = *get_cumulative_args (p_cum);
/* VOIDmode is passed when computing the second argument to a `call'
pattern. This can be ignored. */
/* Advance the cumulative args counter CUM. */
void
-picochip_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+picochip_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int type_align_in_units = 0;
int type_size_in_units;
int new_offset = 0;
rtx[], int *);
static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, bool, bool);
static rtx rs6000_mixed_function_arg (enum machine_mode, const_tree, int);
-static void rs6000_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
+static void rs6000_function_arg_advance (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static rtx rs6000_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
+static rtx rs6000_function_arg (cumulative_args_t, enum machine_mode,
const_tree, bool);
static unsigned int rs6000_function_arg_boundary (enum machine_mode,
const_tree);
static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
-static void setup_incoming_varargs (CUMULATIVE_ARGS *,
+static void setup_incoming_varargs (cumulative_args_t,
enum machine_mode, tree,
int *, int);
-static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
+static bool rs6000_pass_by_reference (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
+static int rs6000_arg_partial_bytes (cumulative_args_t, enum machine_mode,
tree, bool);
static const char *invalid_arg_for_unprototyped_fn (const_tree, const_tree, const_tree);
#if TARGET_MACHO
}
static void
-rs6000_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+rs6000_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
const_tree type, bool named)
{
- rs6000_function_arg_advance_1 (cum, mode, type, named, 0);
+ rs6000_function_arg_advance_1 (get_cumulative_args (cum), mode, type, named,
+ 0);
}
static rtx
itself. */
static rtx
-rs6000_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+rs6000_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
enum rs6000_abi abi = DEFAULT_ABI;
/* Return a marker to indicate whether CR1 needs to set or clear the
returns the number of bytes used by the first element of the PARALLEL. */
static int
-rs6000_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+rs6000_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
tree type, bool named)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int ret = 0;
int align_words;
reference. */
static bool
-rs6000_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
+rs6000_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
enum machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
stack and set PRETEND_SIZE to the length of the registers pushed. */
static void
-setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+setup_incoming_varargs (cumulative_args_t cum, enum machine_mode mode,
tree type, int *pretend_size ATTRIBUTE_UNUSED,
int no_rtl)
{
alias_set_type set;
/* Skip the last named argument. */
- next_cum = *cum;
+ next_cum = *get_cumulative_args (cum);
rs6000_function_arg_advance_1 (&next_cum, mode, type, true, 0);
if (DEFAULT_ABI == ABI_V4)
variable parameter list. */
static rtx
-rx_function_arg (CUMULATIVE_ARGS * cum, enum machine_mode mode,
+rx_function_arg (cumulative_args_t cum, enum machine_mode mode,
const_tree type, bool named)
{
unsigned int next_reg;
- unsigned int bytes_so_far = *cum;
+ unsigned int bytes_so_far = *get_cumulative_args (cum);
unsigned int size;
unsigned int rounded_size;
}
static void
-rx_function_arg_advance (CUMULATIVE_ARGS * cum, enum machine_mode mode,
+rx_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
- *cum += rx_function_arg_size (mode, type);
+ *get_cumulative_args (cum) += rx_function_arg_size (mode, type);
}
static unsigned int
reference. */
static bool
-s390_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
+s390_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
enum machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
matching an ellipsis). */
static void
-s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+s390_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
if (s390_function_arg_float (mode, type))
{
cum->fprs += 1;
are pushed to the stack. */
static rtx
-s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+s390_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
if (s390_function_arg_float (mode, type))
{
if (cum->fprs + 1 > FP_ARG_NUM_REG)
static bool
s390_call_saved_register_used (tree call_expr)
{
- CUMULATIVE_ARGS cum;
+ CUMULATIVE_ARGS cum_v;
+ cumulative_args_t cum;
tree parameter;
enum machine_mode mode;
tree type;
rtx parm_rtx;
int reg, i;
- INIT_CUMULATIVE_ARGS (cum, NULL, NULL, 0, 0);
+ INIT_CUMULATIVE_ARGS (cum_v, NULL, NULL, 0, 0);
+ cum = pack_cumulative_args (&cum_v);
for (i = 0; i < call_expr_nargs (call_expr); i++)
{
mode = TYPE_MODE (type);
gcc_assert (mode);
- if (pass_by_reference (&cum, mode, type, true))
+ if (pass_by_reference (&cum_v, mode, type, true))
{
mode = Pmode;
type = build_pointer_type (type);
}
- parm_rtx = s390_function_arg (&cum, mode, type, 0);
+ parm_rtx = s390_function_arg (cum, mode, type, 0);
- s390_function_arg_advance (&cum, mode, type, 0);
+ s390_function_arg_advance (cum, mode, type, 0);
if (!parm_rtx)
continue;
/* score-protos.h for Sunplus S+CORE processor
- Copyright (C) 2005, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
+ Copyright (C) 2005, 2007, 2008, 2009, 2010, 2011
+ Free Software Foundation, Inc.
This file is part of GCC.
extern HOST_WIDE_INT score_initial_elimination_offset (int from, int to);
extern void score_print_operand (FILE *file, rtx op, int letter);
extern void score_print_operand_address (FILE *file, rtx addr);
-extern int score_arg_partial_bytes (CUMULATIVE_ARGS *cum,
- enum machine_mode mode,
- tree type, bool named);
extern int score_symbolic_constant_p (rtx x,
enum score_symbol_type *symbol_type);
extern void score_movsicc (rtx *ops);
/* Return nonzero when an argument must be passed by reference. */
static bool
-score_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
+score_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
enum machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
/* Implement TARGET_FUNCTION_ARG_ADVANCE hook. */
static void
-score_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+score_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
const_tree type, bool named)
{
if (TARGET_SCORE7 || TARGET_SCORE7D)
- score7_function_arg_advance (cum, mode, type, named);
+ score7_function_arg_advance (get_cumulative_args (cum), mode, type, named);
else
gcc_unreachable ();
}
/* Implement TARGET_ARG_PARTIAL_BYTES macro. */
int
-score_arg_partial_bytes (CUMULATIVE_ARGS *cum,
+score_arg_partial_bytes (cumulative_args_t cum,
enum machine_mode mode, tree type, bool named)
{
if (TARGET_SCORE7 || TARGET_SCORE7D)
- return score7_arg_partial_bytes (cum, mode, type, named);
+ return score7_arg_partial_bytes (get_cumulative_args (cum), mode, type,
+ named);
else
gcc_unreachable ();
}
/* Implement TARGET_FUNCTION_ARG hook. */
static rtx
-score_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+score_function_arg (cumulative_args_t cum, enum machine_mode mode,
const_tree type, bool named)
{
if (TARGET_SCORE7 || TARGET_SCORE7D)
- return score7_function_arg (cum, mode, type, named);
+ return score7_function_arg (get_cumulative_args (cum), mode, type, named);
else
gcc_unreachable ();
}
static rtx sh_libcall_value (enum machine_mode, const_rtx);
static bool sh_return_in_memory (const_tree, const_tree);
static rtx sh_builtin_saveregs (void);
-static void sh_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode, tree, int *, int);
-static bool sh_strict_argument_naming (CUMULATIVE_ARGS *);
-static bool sh_pretend_outgoing_varargs_named (CUMULATIVE_ARGS *);
+static void sh_setup_incoming_varargs (cumulative_args_t, enum machine_mode, tree, int *, int);
+static bool sh_strict_argument_naming (cumulative_args_t);
+static bool sh_pretend_outgoing_varargs_named (cumulative_args_t);
static tree sh_build_builtin_va_list (void);
static void sh_va_start (tree, rtx);
static tree sh_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
int *punsignedp,
const_tree funtype,
int for_return);
-static bool sh_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
+static bool sh_pass_by_reference (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static bool sh_callee_copies (CUMULATIVE_ARGS *, enum machine_mode,
+static bool sh_callee_copies (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static int sh_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
+static int sh_arg_partial_bytes (cumulative_args_t, enum machine_mode,
tree, bool);
-static void sh_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
+static void sh_function_arg_advance (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static rtx sh_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
+static rtx sh_function_arg (cumulative_args_t, enum machine_mode,
const_tree, bool);
static bool sh_scalar_mode_supported_p (enum machine_mode);
static int sh_dwarf_calling_convention (const_tree);
}
static bool
-sh_pass_by_reference (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+sh_pass_by_reference (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
if (targetm.calls.must_pass_in_stack (mode, type))
return true;
}
static bool
-sh_callee_copies (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+sh_callee_copies (cumulative_args_t cum, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
/* ??? How can it possibly be correct to return true only on the
caller side of the equation? Is there someplace else in the
sh backend that's magically producing the copies? */
- return (cum->outgoing
+ return (get_cumulative_args (cum)->outgoing
&& ((mode == BLKmode ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode))
% SH_MIN_ALIGN_FOR_CALLEE_COPY == 0));
}
static int
-sh_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+sh_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int words = 0;
if (!TARGET_SH5
its data type forbids. */
static rtx
-sh_function_arg (CUMULATIVE_ARGS *ca, enum machine_mode mode,
+sh_function_arg (cumulative_args_t ca_v, enum machine_mode mode,
const_tree type, bool named)
{
+ CUMULATIVE_ARGS *ca = get_cumulative_args (ca_v);
+
if (! TARGET_SH5 && mode == VOIDmode)
return GEN_INT (ca->renesas_abi ? 1 : 0);
available.) */
static void
-sh_function_arg_advance (CUMULATIVE_ARGS *ca, enum machine_mode mode,
+sh_function_arg_advance (cumulative_args_t ca_v, enum machine_mode mode,
const_tree type, bool named)
{
+ CUMULATIVE_ARGS *ca = get_cumulative_args (ca_v);
+
if (ca->force_mem)
ca->force_mem = 0;
else if (TARGET_SH5)
later. Fortunately, we already have two flags that are part of struct
function that tell if a function uses varargs or stdarg. */
static void
-sh_setup_incoming_varargs (CUMULATIVE_ARGS *ca,
+sh_setup_incoming_varargs (cumulative_args_t ca,
enum machine_mode mode,
tree type,
int *pretend_arg_size,
{
int named_parm_regs, anon_parm_regs;
- named_parm_regs = (ROUND_REG (*ca, mode)
+ named_parm_regs = (ROUND_REG (*get_cumulative_args (ca), mode)
+ (mode == BLKmode
? ROUND_ADVANCE (int_size_in_bytes (type))
: ROUND_ADVANCE (GET_MODE_SIZE (mode))));
}
static bool
-sh_strict_argument_naming (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
+sh_strict_argument_naming (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
return TARGET_SH5;
}
static bool
-sh_pretend_outgoing_varargs_named (CUMULATIVE_ARGS *ca)
+sh_pretend_outgoing_varargs_named (cumulative_args_t ca_v)
{
+ CUMULATIVE_ARGS *ca = get_cumulative_args (ca_v);
+
return ! (TARGET_HITACHI || ca->renesas_abi) && ! TARGET_SH5;
}
{
tree ptype = build_pointer_type (TREE_TYPE (funtype));
- sh_function_arg_advance (&cum, Pmode, ptype, true);
+ sh_function_arg_advance (pack_cumulative_args (&cum), Pmode, ptype, true);
}
- this_rtx = sh_function_arg (&cum, Pmode, ptr_type_node, true);
+ this_rtx
+ = sh_function_arg (pack_cumulative_args (&cum), Pmode, ptr_type_node, true);
/* For SHcompact, we only have r0 for a scratch register: r1 is the
static chain pointer (even if you can't have nested virtual functions
static enum machine_mode sparc_promote_function_mode (const_tree, enum machine_mode,
int *, const_tree, int);
static bool sparc_return_in_memory (const_tree, const_tree);
-static bool sparc_strict_argument_naming (CUMULATIVE_ARGS *);
+static bool sparc_strict_argument_naming (cumulative_args_t);
static void sparc_va_start (tree, rtx);
static tree sparc_gimplify_va_arg (tree, tree, gimple_seq *, gimple_seq *);
static bool sparc_vector_mode_supported_p (enum machine_mode);
static rtx sparc_legitimize_address (rtx, rtx, enum machine_mode);
static rtx sparc_delegitimize_address (rtx);
static bool sparc_mode_dependent_address_p (const_rtx);
-static bool sparc_pass_by_reference (CUMULATIVE_ARGS *,
+static bool sparc_pass_by_reference (cumulative_args_t,
enum machine_mode, const_tree, bool);
-static void sparc_function_arg_advance (CUMULATIVE_ARGS *,
+static void sparc_function_arg_advance (cumulative_args_t,
enum machine_mode, const_tree, bool);
-static rtx sparc_function_arg_1 (const CUMULATIVE_ARGS *,
+static rtx sparc_function_arg_1 (cumulative_args_t,
enum machine_mode, const_tree, bool, bool);
-static rtx sparc_function_arg (CUMULATIVE_ARGS *,
+static rtx sparc_function_arg (cumulative_args_t,
enum machine_mode, const_tree, bool);
-static rtx sparc_function_incoming_arg (CUMULATIVE_ARGS *,
+static rtx sparc_function_incoming_arg (cumulative_args_t,
enum machine_mode, const_tree, bool);
static unsigned int sparc_function_arg_boundary (enum machine_mode,
const_tree);
-static int sparc_arg_partial_bytes (CUMULATIVE_ARGS *,
+static int sparc_arg_partial_bytes (cumulative_args_t,
enum machine_mode, tree, bool);
static void sparc_dwarf_handle_frame_unspec (const char *, rtx, int);
static void sparc_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
/* Handle the TARGET_STRICT_ARGUMENT_NAMING target hook. */
static bool
-sparc_strict_argument_naming (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
+sparc_strict_argument_naming (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
return TARGET_ARCH64 ? true : false;
}
TARGET_FUNCTION_INCOMING_ARG. */
static rtx
-sparc_function_arg_1 (const CUMULATIVE_ARGS *cum, enum machine_mode mode,
+sparc_function_arg_1 (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named, bool incoming_p)
{
+ const CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
int regbase = (incoming_p
? SPARC_INCOMING_INT_ARG_FIRST
: SPARC_OUTGOING_INT_ARG_FIRST);
/* Handle the TARGET_FUNCTION_ARG target hook. */
static rtx
-sparc_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+sparc_function_arg (cumulative_args_t cum, enum machine_mode mode,
const_tree type, bool named)
{
return sparc_function_arg_1 (cum, mode, type, named, false);
/* Handle the TARGET_FUNCTION_INCOMING_ARG target hook. */
static rtx
-sparc_function_incoming_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+sparc_function_incoming_arg (cumulative_args_t cum, enum machine_mode mode,
const_tree type, bool named)
{
return sparc_function_arg_1 (cum, mode, type, named, true);
mode] will be split between that reg and memory. */
static int
-sparc_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+sparc_arg_partial_bytes (cumulative_args_t cum, enum machine_mode mode,
tree type, bool named)
{
int slotno, regno, padding;
/* We pass false for incoming_p here, it doesn't matter. */
- slotno = function_arg_slotno (cum, mode, type, named, false,
- &regno, &padding);
+ slotno = function_arg_slotno (get_cumulative_args (cum), mode, type, named,
+ false, &regno, &padding);
if (slotno == -1)
return 0;
Specify whether to pass the argument by reference. */
static bool
-sparc_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
+sparc_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
enum machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
TYPE is null for libcalls where that information may not be available. */
static void
-sparc_function_arg_advance (struct sparc_args *cum, enum machine_mode mode,
+sparc_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int regno, padding;
/* We pass false for incoming_p here, it doesn't matter. */
extern bool spu_legitimate_constant_p (enum machine_mode, rtx);
extern int spu_initial_elimination_offset (int from, int to);
extern rtx spu_function_value (const_tree type, const_tree func);
-extern void spu_setup_incoming_varargs (int *cum, enum machine_mode mode,
- tree type, int *pretend_size,
- int no_rtl);
extern int spu_expand_mov (rtx * ops, enum machine_mode mode);
extern int spu_split_load (rtx * ops);
extern int spu_split_store (rtx * ops);
int flags,
bool *no_add_attrs);
static int spu_naked_function_p (tree func);
-static bool spu_pass_by_reference (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+static bool spu_pass_by_reference (cumulative_args_t cum,
+ enum machine_mode mode,
const_tree type, bool named);
-static rtx spu_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+static rtx spu_function_arg (cumulative_args_t cum, enum machine_mode mode,
const_tree type, bool named);
-static void spu_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+static void spu_function_arg_advance (cumulative_args_t cum,
+ enum machine_mode mode,
const_tree type, bool named);
static tree spu_build_builtin_va_list (void);
static void spu_va_start (tree, rtx);
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START spu_va_start
+static void spu_setup_incoming_varargs (cumulative_args_t cum,
+ enum machine_mode mode,
+ tree type, int *pretend_size,
+ int no_rtl);
#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS spu_setup_incoming_varargs
}
static rtx
-spu_function_arg (CUMULATIVE_ARGS *cum,
+spu_function_arg (cumulative_args_t cum_v,
enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int byte_size;
if (*cum >= MAX_REGISTER_ARGS)
}
static void
-spu_function_arg_advance (CUMULATIVE_ARGS * cum, enum machine_mode mode,
+spu_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
*cum += (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
? 1
: mode == BLKmode
/* Variable sized types are passed by reference. */
static bool
-spu_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
+spu_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
to the first unnamed parameter. If the first unnamed parameter is
in the stack then save no registers. Set pretend_args_size to the
amount of space needed to save the registers. */
-void
-spu_setup_incoming_varargs (CUMULATIVE_ARGS * cum, enum machine_mode mode,
+static void
+spu_setup_incoming_varargs (cumulative_args_t cum, enum machine_mode mode,
tree type, int *pretend_size, int no_rtl)
{
if (!no_rtl)
rtx tmp;
int regno;
int offset;
- int ncum = *cum;
+ int ncum = *get_cumulative_args (cum);
/* cum currently points to the last named argument; we want to
start at the next argument. */
- spu_function_arg_advance (&ncum, mode, type, true);
+ spu_function_arg_advance (pack_cumulative_args (&ncum), mode, type, true);
offset = -STACK_POINTER_OFFSET;
for (regno = ncum; regno < MAX_REGISTER_ARGS; regno++)
the word count. */
static void
-xstormy16_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+xstormy16_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
/* If an argument would otherwise be passed partially in registers,
and partially on the stack, the whole of it is passed on the
stack. */
}
static rtx
-xstormy16_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+xstormy16_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
if (mode == VOIDmode)
return const0_rtx;
if (targetm.calls.must_pass_in_stack (mode, type)
Specify whether to pass the argument by reference. */
static bool
-v850_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
+v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
enum machine_mode mode, const_tree type,
bool named ATTRIBUTE_UNUSED)
{
/* Implementing the Varargs Macros. */
static bool
-v850_strict_argument_naming (CUMULATIVE_ARGS * ca ATTRIBUTE_UNUSED)
+v850_strict_argument_naming (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
return !TARGET_GHS ? true : false;
}
is NULL_RTX, the argument will be pushed. */
static rtx
-v850_function_arg (CUMULATIVE_ARGS * cum, enum machine_mode mode,
+v850_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
rtx result = NULL_RTX;
int size, align;
/* Return the number of bytes which must be put into registers
for values which are part in registers and part in memory. */
static int
-v850_arg_partial_bytes (CUMULATIVE_ARGS * cum, enum machine_mode mode,
+v850_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
tree type, bool named)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int size, align;
if (TARGET_GHS && !named)
(TYPE is null for libcalls where that information may not be available.) */
static void
-v850_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+v850_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
cum->nbytes += (((type && int_size_in_bytes (type) > 8
? GET_MODE_SIZE (Pmode)
: (mode != BLKmode
/* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
static void
-v850_setup_incoming_varargs (CUMULATIVE_ARGS *ca,
+v850_setup_incoming_varargs (cumulative_args_t ca,
enum machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED,
int *pretend_arg_size ATTRIBUTE_UNUSED,
int second_time ATTRIBUTE_UNUSED)
{
- ca->anonymous_args = (!TARGET_GHS ? 1 : 0);
+ get_cumulative_args (ca)->anonymous_args = (!TARGET_GHS ? 1 : 0);
}
/* Worker function for TARGET_CAN_ELIMINATE. */
static int vax_address_cost_1 (rtx);
static int vax_address_cost (rtx, bool);
static bool vax_rtx_costs (rtx, int, int, int *, bool);
-static rtx vax_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
+static rtx vax_function_arg (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static void vax_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
+static void vax_function_arg_advance (cumulative_args_t, enum machine_mode,
const_tree, bool);
static rtx vax_struct_value_rtx (tree, int);
static rtx vax_builtin_setjmp_frame_value (void);
/* On the VAX all args are pushed. */
static rtx
-vax_function_arg (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
+vax_function_arg (cumulative_args_t cum ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
may not be available.) */
static void
-vax_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+vax_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
+
*cum += (mode != BLKmode
? (GET_MODE_SIZE (mode) + 3) & ~3
: (int_size_in_bytes (type) + 3) & ~3);
static bool xtensa_return_in_memory (const_tree, const_tree);
static tree xtensa_gimplify_va_arg_expr (tree, tree, gimple_seq *,
gimple_seq *);
-static void xtensa_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
+static void xtensa_function_arg_advance (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static rtx xtensa_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
+static rtx xtensa_function_arg (cumulative_args_t, enum machine_mode,
const_tree, bool);
-static rtx xtensa_function_incoming_arg (CUMULATIVE_ARGS *,
+static rtx xtensa_function_incoming_arg (cumulative_args_t,
enum machine_mode, const_tree, bool);
static rtx xtensa_function_value (const_tree, const_tree, bool);
static rtx xtensa_libcall_value (enum machine_mode, const_rtx);
/* Advance the argument to the next argument position. */
static void
-xtensa_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+xtensa_function_arg_advance (cumulative_args_t cum, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
int words, max;
int *arg_words;
- arg_words = &cum->arg_words;
+ arg_words = &get_cumulative_args (cum)->arg_words;
max = MAX_ARGS_IN_REGISTERS;
words = (((mode != BLKmode)
if this is an incoming argument to the current function. */
static rtx
-xtensa_function_arg_1 (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+xtensa_function_arg_1 (cumulative_args_t cum_v, enum machine_mode mode,
const_tree type, bool incoming_p)
{
+ CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
int regbase, words, max;
int *arg_words;
int regno;
/* Implement TARGET_FUNCTION_ARG. */
static rtx
-xtensa_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+xtensa_function_arg (cumulative_args_t cum, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
return xtensa_function_arg_1 (cum, mode, type, false);
/* Implement TARGET_FUNCTION_INCOMING_ARG. */
static rtx
-xtensa_function_incoming_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
+xtensa_function_incoming_arg (cumulative_args_t cum, enum machine_mode mode,
const_tree type, bool named ATTRIBUTE_UNUSED)
{
return xtensa_function_arg_1 (cum, mode, type, true);
types of arguments are passed in registers or how they are arranged in
the stack.
-@deftypefn {Target Hook} rtx TARGET_FUNCTION_ARG (CUMULATIVE_ARGS *@var{ca}, enum machine_mode @var{mode}, const_tree @var{type}, bool @var{named})
+@deftypefn {Target Hook} rtx TARGET_FUNCTION_ARG (cumulative_args_t @var{ca}, enum machine_mode @var{mode}, const_tree @var{type}, bool @var{named})
Return an RTX indicating whether a function argument is passed in a
register and if so, which register.
documentation.
@end deftypefn
-@deftypefn {Target Hook} rtx TARGET_FUNCTION_INCOMING_ARG (CUMULATIVE_ARGS *@var{ca}, enum machine_mode @var{mode}, const_tree @var{type}, bool @var{named})
+@deftypefn {Target Hook} rtx TARGET_FUNCTION_INCOMING_ARG (cumulative_args_t @var{ca}, enum machine_mode @var{mode}, const_tree @var{type}, bool @var{named})
Define this hook if the target machine has ``register windows'', so
that the register in which a function sees an argument is not
necessarily the same as the one in which the caller passed the
@code{TARGET_FUNCTION_ARG} serves both purposes.
@end deftypefn
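
[Editorial illustration, not part of this patch: under the new signature a port without register windows typically defines only TARGET_FUNCTION_ARG and unpacks the summarizer on entry. The `foo' port below, its CUMULATIVE_ARGS layout (a single `nregs' word counter) and the FOO_FIRST_ARG_REG/FOO_NUM_ARG_REGS macros are hypothetical placeholders.]

/* Sketch only: a minimal TARGET_FUNCTION_ARG for an imaginary `foo'
   port whose CUMULATIVE_ARGS is a struct with an `nregs' word counter.
   FOO_FIRST_ARG_REG and FOO_NUM_ARG_REGS are hypothetical macros.  */
static rtx
foo_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
                  const_tree type ATTRIBUTE_UNUSED,
                  bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);

  /* VOIDmode marks the end of the argument list.  */
  if (mode == VOIDmode)
    return NULL_RTX;

  if (cum->nregs < FOO_NUM_ARG_REGS)
    return gen_rtx_REG (mode, FOO_FIRST_ARG_REG + cum->nregs);

  return NULL_RTX;  /* Remaining arguments go on the stack.  */
}

[Ports with register windows would additionally define TARGET_FUNCTION_INCOMING_ARG with the incoming register base, as the sparc and mmix hunks elsewhere in this patch do.]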
-@deftypefn {Target Hook} int TARGET_ARG_PARTIAL_BYTES (CUMULATIVE_ARGS *@var{cum}, enum machine_mode @var{mode}, tree @var{type}, bool @var{named})
+@deftypefn {Target Hook} int TARGET_ARG_PARTIAL_BYTES (cumulative_args_t @var{cum}, enum machine_mode @var{mode}, tree @var{type}, bool @var{named})
This target hook returns the number of bytes at the beginning of an
argument that must be put in registers. The value must be zero for
arguments that are passed entirely in registers or that are entirely
@code{TARGET_FUNCTION_INCOMING_ARG}, for the called function.
@end deftypefn
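
[A hedged sketch of the corresponding partial-bytes hook for the same hypothetical `foo' port; it assumes variable-sized types have already been forced onto the stack or passed by reference.]

/* Sketch only (hypothetical `foo' port): an argument is split between
   registers and the stack when it starts in a register but does not
   fit entirely.  Assumes the `nregs' counter and FOO_NUM_ARG_REGS
   placeholders introduced above.  */
static int
foo_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
                       tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  HOST_WIDE_INT size = (mode == BLKmode && type
                        ? int_size_in_bytes (type)
                        : (HOST_WIDE_INT) GET_MODE_SIZE (mode));
  int words = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  if (cum->nregs >= FOO_NUM_ARG_REGS
      || cum->nregs + words <= FOO_NUM_ARG_REGS)
    return 0;  /* Entirely on the stack, or entirely in registers.  */

  return (FOO_NUM_ARG_REGS - cum->nregs) * UNITS_PER_WORD;
}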
-@deftypefn {Target Hook} bool TARGET_PASS_BY_REFERENCE (CUMULATIVE_ARGS *@var{cum}, enum machine_mode @var{mode}, const_tree @var{type}, bool @var{named})
+@deftypefn {Target Hook} bool TARGET_PASS_BY_REFERENCE (cumulative_args_t @var{cum}, enum machine_mode @var{mode}, const_tree @var{type}, bool @var{named})
This target hook should return @code{true} if an argument at the
position indicated by @var{cum} should be passed by reference. This
predicate is queried after target independent reasons for being
to that type.
@end deftypefn
-@deftypefn {Target Hook} bool TARGET_CALLEE_COPIES (CUMULATIVE_ARGS *@var{cum}, enum machine_mode @var{mode}, const_tree @var{type}, bool @var{named})
+@deftypefn {Target Hook} bool TARGET_CALLEE_COPIES (cumulative_args_t @var{cum}, enum machine_mode @var{mode}, const_tree @var{type}, bool @var{named})
The function argument described by the parameters to this hook is
known to be passed by reference. The hook should return true if the
function argument should be copied by the callee instead of copied
@c --mew 5feb93 i switched the order of the sentences. --mew 10feb93
@end defmac
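
[A non-authoritative example of the pass-by-reference hook under the new signature, again for the hypothetical `foo' port; the two-word cutoff is an arbitrary assumption, not a rule taken from any real ABI.]

/* Sketch only: pass variable-sized types and aggregates larger than
   two words by invisible reference.  */
static bool
foo_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
                       enum machine_mode mode, const_tree type,
                       bool named ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT size = (mode == BLKmode && type
                        ? int_size_in_bytes (type)
                        : (HOST_WIDE_INT) GET_MODE_SIZE (mode));

  /* int_size_in_bytes returns -1 for variable-sized types.  */
  return size < 0 || size > 2 * (HOST_WIDE_INT) UNITS_PER_WORD;
}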
-@deftypefn {Target Hook} void TARGET_FUNCTION_ARG_ADVANCE (CUMULATIVE_ARGS *@var{ca}, enum machine_mode @var{mode}, const_tree @var{type}, bool @var{named})
+@deftypefn {Target Hook} void TARGET_FUNCTION_ARG_ADVANCE (cumulative_args_t @var{ca}, enum machine_mode @var{mode}, const_tree @var{type}, bool @var{named})
This hook updates the summarizer variable pointed to by @var{ca} to
advance past an argument in the argument list. The values @var{mode},
@var{type} and @var{named} describe that argument. Once this is done,
to use as the return of @code{__builtin_saveregs}.
@end deftypefn
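
[A matching sketch of the advance hook for the hypothetical `foo' port: it unpacks the summarizer and bumps the `nregs' counter past the argument just processed.]

/* Sketch only: consume as many argument-register slots as the
   argument occupies, rounding up to whole words.  */
static void
foo_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
                          const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  HOST_WIDE_INT size = (mode == BLKmode && type
                        ? int_size_in_bytes (type)
                        : (HOST_WIDE_INT) GET_MODE_SIZE (mode));

  cum->nregs += (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
}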
-@deftypefn {Target Hook} void TARGET_SETUP_INCOMING_VARARGS (CUMULATIVE_ARGS *@var{args_so_far}, enum machine_mode @var{mode}, tree @var{type}, int *@var{pretend_args_size}, int @var{second_time})
+@deftypefn {Target Hook} void TARGET_SETUP_INCOMING_VARARGS (cumulative_args_t @var{args_so_far}, enum machine_mode @var{mode}, tree @var{type}, int *@var{pretend_args_size}, int @var{second_time})
This target hook offers an alternative to using
@code{__builtin_saveregs} and defining the hook
@code{TARGET_EXPAND_BUILTIN_SAVEREGS}. Use it to store the anonymous
not generate any instructions in this case.
@end deftypefn
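
[A sketch of how the varargs setup hook can combine get_cumulative_args and pack_cumulative_args, mirroring the mips and spu hunks in this patch: copy the summarizer, advance the copy past the last named argument, then compute the pretend size. The `foo' names and the `nregs'/FOO_NUM_ARG_REGS layout remain placeholders.]

/* Sketch only: record how much register space the anonymous
   arguments of a varargs call need.  */
static void
foo_setup_incoming_varargs (cumulative_args_t cum_v, enum machine_mode mode,
                            tree type, int *pretend_size,
                            int no_rtl ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS local_cum = *get_cumulative_args (cum_v);

  /* The caller has not advanced CUM past the last named argument;
     do that on a local copy, re-packed for the hook interface.  */
  foo_function_arg_advance (pack_cumulative_args (&local_cum),
                            mode, type, true);

  if (local_cum.nregs < FOO_NUM_ARG_REGS)
    *pretend_size = (FOO_NUM_ARG_REGS - local_cum.nregs) * UNITS_PER_WORD;

  /* A real port would also dump the remaining argument registers to
     the stack here, unless NO_RTL is set.  */
}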
-@deftypefn {Target Hook} bool TARGET_STRICT_ARGUMENT_NAMING (CUMULATIVE_ARGS *@var{ca})
+@deftypefn {Target Hook} bool TARGET_STRICT_ARGUMENT_NAMING (cumulative_args_t @var{ca})
Define this hook to return @code{true} if the location where a function
argument is passed depends on whether or not it is a named argument.
You need not define this hook if it always returns @code{false}.
@end deftypefn
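
[Finally, a minimal sketch of the naming hook under the new signature; TARGET_FOO_ABI64 is a made-up ABI flag standing in for tests like sparc's TARGET_ARCH64 or mips's !TARGET_OLDABI seen earlier in this patch.]

/* Sketch only: strict argument naming keyed off a hypothetical
   ABI flag.  */
static bool
foo_strict_argument_naming (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return TARGET_FOO_ABI64;
}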
-@deftypefn {Target Hook} bool TARGET_PRETEND_OUTGOING_VARARGS_NAMED (CUMULATIVE_ARGS *@var{ca})
+@deftypefn {Target Hook} bool TARGET_PRETEND_OUTGOING_VARARGS_NAMED (cumulative_args_t @var{ca})
If you need to conditionally change ABIs so that one works with
@code{TARGET_SETUP_INCOMING_VARARGS}, but the other works like neither
@code{TARGET_SETUP_INCOMING_VARARGS} nor @code{TARGET_STRICT_ARGUMENT_NAMING} was
static bool
get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
{
- CUMULATIVE_ARGS args_so_far;
+ CUMULATIVE_ARGS args_so_far_v;
+ cumulative_args_t args_so_far;
tree arg;
int idx;
- INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
+ INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
+ args_so_far = pack_cumulative_args (&args_so_far_v);
arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
for (idx = 0;
{
enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
rtx reg, link, tmp;
- reg = targetm.calls.function_arg (&args_so_far, mode, NULL_TREE, true);
+ reg = targetm.calls.function_arg (args_so_far, mode, NULL_TREE, true);
if (!reg || !REG_P (reg) || GET_MODE (reg) != mode
|| GET_MODE_CLASS (mode) != MODE_INT)
return false;
if (tmp)
args[idx] = tmp;
- targetm.calls.function_arg_advance (&args_so_far, mode, NULL_TREE, true);
+ targetm.calls.function_arg_advance (args_so_far, mode, NULL_TREE, true);
}
if (arg != void_list_node || idx != nargs)
return false;
/* If any argument goes in memory, then it might clobber an outgoing
argument. */
{
- CUMULATIVE_ARGS args_so_far;
+ CUMULATIVE_ARGS args_so_far_v;
+ cumulative_args_t args_so_far;
tree fn, arg;
fn = emit_block_move_libcall_fn (false);
- INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
+ INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
+ args_so_far = pack_cumulative_args (&args_so_far_v);
arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
{
enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
- rtx tmp = targetm.calls.function_arg (&args_so_far, mode,
+ rtx tmp = targetm.calls.function_arg (args_so_far, mode,
NULL_TREE, true);
if (!tmp || !REG_P (tmp))
return false;
- if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
+ if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
return false;
- targetm.calls.function_arg_advance (&args_so_far, mode,
+ targetm.calls.function_arg_advance (args_so_far, mode,
NULL_TREE, true);
}
}
}
}
- return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
+ return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
+ type, named_arg);
}
/* Return true if TYPE, which is passed by reference, should be callee
{
if (type && TREE_ADDRESSABLE (type))
return false;
- return targetm.calls.callee_copies (ca, mode, type, named_arg);
+ return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
+ named_arg);
}
/* Structures to communicate between the subroutines of assign_parms.
struct assign_parm_data_all
{
- CUMULATIVE_ARGS args_so_far;
+ /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
+ should become a job of the target or should otherwise be encapsulated. */
+ CUMULATIVE_ARGS args_so_far_v;
+ cumulative_args_t args_so_far;
struct args_size stack_args_size;
tree function_result_decl;
tree orig_fnargs;
fntype = TREE_TYPE (current_function_decl);
#ifdef INIT_CUMULATIVE_INCOMING_ARGS
- INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
+ INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
#else
- INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
+ INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
current_function_decl, -1);
#endif
+ all->args_so_far = pack_cumulative_args (&all->args_so_far_v);
#ifdef REG_PARM_STACK_SPACE
all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
data->named_arg = 1; /* No variadic parms. */
else if (DECL_CHAIN (parm))
data->named_arg = 1; /* Not the last non-variadic parm. */
- else if (targetm.calls.strict_argument_naming (&all->args_so_far))
+ else if (targetm.calls.strict_argument_naming (all->args_so_far))
data->named_arg = 1; /* Only variadic ones are unnamed. */
else
data->named_arg = 0; /* Treat as variadic. */
passed_type = TREE_TYPE (first_field (passed_type));
/* See if this arg was passed by invisible reference. */
- if (pass_by_reference (&all->args_so_far, passed_mode,
+ if (pass_by_reference (&all->args_so_far_v, passed_mode,
passed_type, data->named_arg))
{
passed_type = nominal_type = build_pointer_type (passed_type);
{
int varargs_pretend_bytes = 0;
- targetm.calls.setup_incoming_varargs (&all->args_so_far,
+ targetm.calls.setup_incoming_varargs (all->args_so_far,
data->promoted_mode,
data->passed_type,
&varargs_pretend_bytes, no_rtl);
return;
}
- entry_parm = targetm.calls.function_incoming_arg (&all->args_so_far,
+ entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
data->promoted_mode,
data->passed_type,
data->named_arg);
#endif
if (!in_regs && !data->named_arg)
{
- if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
+ if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
{
rtx tem;
- tem = targetm.calls.function_incoming_arg (&all->args_so_far,
+ tem = targetm.calls.function_incoming_arg (all->args_so_far,
data->promoted_mode,
data->passed_type, true);
in_regs = tem != NULL;
{
int partial;
- partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
+ partial = targetm.calls.arg_partial_bytes (all->args_so_far,
data->promoted_mode,
data->passed_type,
data->named_arg);
set_decl_incoming_rtl (parm, data.entry_parm, false);
/* Update info on where next arg arrives in registers. */
- targetm.calls.function_arg_advance (&all.args_so_far, data.promoted_mode,
+ targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
data.passed_type, data.named_arg);
assign_parm_adjust_stack_rtl (&data);
/* For stdarg.h function, save info about
regs and stack space used by the named args. */
- crtl->args.info = all.args_so_far;
+ crtl->args.info = all.args_so_far_v;
/* Set the rtx used for the function return value. Put this in its
own variable so any optimizers that need this information don't have
continue;
/* Update info on where next arg arrives in registers. */
- targetm.calls.function_arg_advance (&all.args_so_far, data.promoted_mode,
+ targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
data.passed_type, data.named_arg);
/* ??? Once upon a time variable_size stuffed parameter list
if (data.passed_pointer)
{
tree type = TREE_TYPE (data.passed_type);
- if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
+ if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
type, data.named_arg))
{
tree local, t;
(pass_by_reference,
"",
bool,
- (CUMULATIVE_ARGS *cum, enum machine_mode mode, const_tree type, bool named),
+ (cumulative_args_t cum, enum machine_mode mode, const_tree type, bool named),
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false)
DEFHOOK
DEFHOOK
(setup_incoming_varargs,
"",
- void, (CUMULATIVE_ARGS *args_so_far, enum machine_mode mode, tree type,
+ void, (cumulative_args_t args_so_far, enum machine_mode mode, tree type,
int *pretend_args_size, int second_time),
default_setup_incoming_varargs)
DEFHOOK
(strict_argument_naming,
"",
- bool, (CUMULATIVE_ARGS *ca),
+ bool, (cumulative_args_t ca),
hook_bool_CUMULATIVE_ARGS_false)
/* Returns true if we should use
DEFHOOK
(pretend_outgoing_varargs_named,
"",
- bool, (CUMULATIVE_ARGS *ca),
+ bool, (cumulative_args_t ca),
default_pretend_outgoing_varargs_named)
/* Given a complex type T, return true if a parameter of type T
(callee_copies,
"",
bool,
- (CUMULATIVE_ARGS *cum, enum machine_mode mode, const_tree type, bool named),
+ (cumulative_args_t cum, enum machine_mode mode, const_tree type, bool named),
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false)
/* Return zero for arguments passed entirely on the stack or entirely
DEFHOOK
(arg_partial_bytes,
"",
- int, (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type, bool named),
+ int, (cumulative_args_t cum, enum machine_mode mode, tree type, bool named),
hook_int_CUMULATIVE_ARGS_mode_tree_bool_0)
/* Update the state in CA to advance past an argument in the
(function_arg_advance,
"",
void,
- (CUMULATIVE_ARGS *ca, enum machine_mode mode, const_tree type, bool named),
+ (cumulative_args_t ca, enum machine_mode mode, const_tree type, bool named),
default_function_arg_advance)
/* Return zero if the argument described by the state of CA should
DEFHOOK
(function_arg,
"",
- rtx, (CUMULATIVE_ARGS *ca, enum machine_mode mode, const_tree type,
+ rtx, (cumulative_args_t ca, enum machine_mode mode, const_tree type,
bool named),
default_function_arg)
DEFHOOK
(function_incoming_arg,
"",
- rtx, (CUMULATIVE_ARGS *ca, enum machine_mode mode, const_tree type,
+ rtx, (cumulative_args_t ca, enum machine_mode mode, const_tree type,
bool named),
default_function_incoming_arg)
/* Data structure definitions for a generic GCC target.
- Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
+ Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+ 2011
Free Software Foundation, Inc.
This program is free software; you can redistribute it and/or modify it
#ifndef GCC_TARGET_H
#define GCC_TARGET_H
-#include "tm.h"
#include "insn-modes.h"
+#ifdef ENABLE_CHECKING
+
+typedef struct { void *magic; void *p; } cumulative_args_t;
+
+#else /* !ENABLE_CHECKING */
+
+/* When using a GCC build compiler, we could use
+ __attribute__((transparent_union)) to get cumulative_args_t function
+ arguments passed like scalars where the ABI would mandate a less
+ efficient way of argument passing otherwise. However, that would come
+ at the cost of less type-safe !ENABLE_CHECKING compilation. */
+
+typedef union { void *p; } cumulative_args_t;
+
+#endif /* !ENABLE_CHECKING */
+
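
Purely for illustration, the alternative alluded to in the comment above would look roughly like the following.  It is not what this patch does, because with a transparent union any pointer could be handed to the hooks unchecked, which defeats the point of the opaque type:

/* NOT used by this patch: a transparent-union variant would let a GCC
   build compiler pass the handle exactly like the underlying pointer,
   but callers could then pass any void * directly, bypassing
   pack_cumulative_args.  */
typedef union __attribute__ ((__transparent_union__))
{
  void *p;
} cumulative_args_alt_t;
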
/* Types used by the record_gcc_switches() target function. */
typedef enum
{
extern struct gcc_target targetm;
+#ifdef GCC_TM_H
+
+#ifndef CUMULATIVE_ARGS_MAGIC
+#define CUMULATIVE_ARGS_MAGIC ((void *) &targetm.calls)
+#endif
+
+static inline CUMULATIVE_ARGS *
+get_cumulative_args (cumulative_args_t arg)
+{
+#ifdef ENABLE_CHECKING
+ gcc_assert (arg.magic == CUMULATIVE_ARGS_MAGIC);
+#endif /* ENABLE_CHECKING */
+ return (CUMULATIVE_ARGS *) arg.p;
+}
+
+static inline cumulative_args_t
+pack_cumulative_args (CUMULATIVE_ARGS *arg)
+{
+ cumulative_args_t ret;
+
+#ifdef ENABLE_CHECKING
+ ret.magic = CUMULATIVE_ARGS_MAGIC;
+#endif /* ENABLE_CHECKING */
+ ret.p = (void *) arg;
+ return ret;
+}
+#endif /* GCC_TM_H */
+
#endif /* GCC_TARGET_H */
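
Taken together, a middle-end caller packs the concrete structure once and each hook implementation recovers the pointer with get_cumulative_args; with ENABLE_CHECKING the magic field is verified on every unpack.  A condensed sketch of the round trip, with hypothetical function names and assuming GCC's usual internal headers are visible:

/* Caller side: wrap the target's CUMULATIVE_ARGS and query the first
   argument's register through the hook interface.  */
static rtx
example_first_arg_reg (tree fndecl, tree fntype, enum machine_mode mode,
                       const_tree type)
{
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;

  INIT_CUMULATIVE_ARGS (args_so_far_v, fntype, NULL_RTX, fndecl, -1);
  args_so_far = pack_cumulative_args (&args_so_far_v);
  return targetm.calls.function_arg (args_so_far, mode, type, true);
}

/* Hook side: the handle is turned back into a pointer before use.  */
static rtx
example_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
                      const_tree type ATTRIBUTE_UNUSED,
                      bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED = get_cumulative_args (cum_v);

  /* Consult CUM and MODE exactly as before the interface change and
     return the register, or NULL_RTX for a stack-passed argument.  */
  return NULL_RTX;
}
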
}
void
-default_setup_incoming_varargs (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
+default_setup_incoming_varargs (cumulative_args_t ca ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED,
int *pretend_arg_size ATTRIBUTE_UNUSED,
/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false. */
bool
-hook_bool_CUMULATIVE_ARGS_false (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
+hook_bool_CUMULATIVE_ARGS_false (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
return false;
}
bool
-default_pretend_outgoing_varargs_named (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
+default_pretend_outgoing_varargs_named (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
return (targetm.calls.setup_incoming_varargs
!= default_setup_incoming_varargs);
/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true. */
bool
-hook_bool_CUMULATIVE_ARGS_true (CUMULATIVE_ARGS * a ATTRIBUTE_UNUSED)
+hook_bool_CUMULATIVE_ARGS_true (cumulative_args_t a ATTRIBUTE_UNUSED)
{
return true;
}
of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK. */
bool
-hook_pass_by_reference_must_pass_in_stack (CUMULATIVE_ARGS *c ATTRIBUTE_UNUSED,
+hook_pass_by_reference_must_pass_in_stack (cumulative_args_t c ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED, const_tree type ATTRIBUTE_UNUSED,
bool named_arg ATTRIBUTE_UNUSED)
{
version of the hook is true for all named arguments. */
bool
-hook_callee_copies_named (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
+hook_callee_copies_named (cumulative_args_t ca ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED, bool named)
{
bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false (
- CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
+ cumulative_args_t ca ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true (
- CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
+ cumulative_args_t ca ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
int
hook_int_CUMULATIVE_ARGS_mode_tree_bool_0 (
- CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
+ cumulative_args_t ca ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
}
void
-default_function_arg_advance (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
+default_function_arg_advance (cumulative_args_t ca ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
}
rtx
-default_function_arg (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
+default_function_arg (cumulative_args_t ca ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
}
rtx
-default_function_incoming_arg (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
+default_function_incoming_arg (cumulative_args_t ca ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
const_tree type ATTRIBUTE_UNUSED,
bool named ATTRIBUTE_UNUSED)
extern bool default_return_in_memory (const_tree, const_tree);
extern rtx default_expand_builtin_saveregs (void);
-extern void default_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode, tree, int *, int);
+extern void default_setup_incoming_varargs (cumulative_args_t, enum machine_mode, tree, int *, int);
extern rtx default_builtin_setjmp_frame_value (void);
-extern bool default_pretend_outgoing_varargs_named (CUMULATIVE_ARGS *);
+extern bool default_pretend_outgoing_varargs_named (cumulative_args_t);
extern enum machine_mode default_eh_return_filter_mode (void);
extern enum machine_mode default_libgcc_cmp_return_mode (void);
extern tree default_cxx_get_cookie_size (tree);
extern bool hook_pass_by_reference_must_pass_in_stack
- (CUMULATIVE_ARGS *, enum machine_mode mode, const_tree, bool);
+ (cumulative_args_t, enum machine_mode mode, const_tree, bool);
extern bool hook_callee_copies_named
- (CUMULATIVE_ARGS *ca, enum machine_mode, const_tree, bool);
+ (cumulative_args_t ca, enum machine_mode, const_tree, bool);
extern void default_print_operand (FILE *, rtx, int);
extern void default_print_operand_address (FILE *, rtx);
/* These are here, and not in hooks.[ch], because not all users of
hooks.h include tm.h, and thus we don't have CUMULATIVE_ARGS. */
-extern bool hook_bool_CUMULATIVE_ARGS_false (CUMULATIVE_ARGS *);
-extern bool hook_bool_CUMULATIVE_ARGS_true (CUMULATIVE_ARGS *);
+extern bool hook_bool_CUMULATIVE_ARGS_false (cumulative_args_t);
+extern bool hook_bool_CUMULATIVE_ARGS_true (cumulative_args_t);
extern bool hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false
- (CUMULATIVE_ARGS *, enum machine_mode, const_tree, bool);
+ (cumulative_args_t, enum machine_mode, const_tree, bool);
extern bool hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
- (CUMULATIVE_ARGS *, enum machine_mode, const_tree, bool);
+ (cumulative_args_t, enum machine_mode, const_tree, bool);
extern int hook_int_CUMULATIVE_ARGS_mode_tree_bool_0
- (CUMULATIVE_ARGS *, enum machine_mode, tree, bool);
+ (cumulative_args_t, enum machine_mode, tree, bool);
extern const char *hook_invalid_arg_for_unprototyped_fn
(const_tree, const_tree, const_tree);
extern void default_function_arg_advance
- (CUMULATIVE_ARGS *, enum machine_mode, const_tree, bool);
+ (cumulative_args_t, enum machine_mode, const_tree, bool);
extern rtx default_function_arg
- (CUMULATIVE_ARGS *, enum machine_mode, const_tree, bool);
+ (cumulative_args_t, enum machine_mode, const_tree, bool);
extern rtx default_function_incoming_arg
- (CUMULATIVE_ARGS *, enum machine_mode, const_tree, bool);
+ (cumulative_args_t, enum machine_mode, const_tree, bool);
extern unsigned int default_function_arg_boundary (enum machine_mode,
const_tree);
extern bool hook_bool_const_rtx_commutative_p (const_rtx, int);
rtx this_arg = NULL_RTX;
tree type = NULL_TREE, t, fndecl = NULL_TREE;
tree obj_type_ref = NULL_TREE;
- CUMULATIVE_ARGS args_so_far;
+ CUMULATIVE_ARGS args_so_far_v;
+ cumulative_args_t args_so_far;
- memset (&args_so_far, 0, sizeof (args_so_far));
+ memset (&args_so_far_v, 0, sizeof (args_so_far_v));
+ args_so_far = pack_cumulative_args (&args_so_far_v);
if (GET_CODE (call) == PARALLEL)
call = XVECEXP (call, 0, 0);
if (GET_CODE (call) == SET)
tree struct_addr = build_pointer_type (TREE_TYPE (type));
enum machine_mode mode = TYPE_MODE (struct_addr);
rtx reg;
- INIT_CUMULATIVE_ARGS (args_so_far, type, NULL_RTX, fndecl,
+ INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
nargs + 1);
- reg = targetm.calls.function_arg (&args_so_far, mode,
+ reg = targetm.calls.function_arg (args_so_far, mode,
struct_addr, true);
- targetm.calls.function_arg_advance (&args_so_far, mode,
+ targetm.calls.function_arg_advance (args_so_far, mode,
struct_addr, true);
if (reg == NULL_RTX)
{
}
else
#endif
- INIT_CUMULATIVE_ARGS (args_so_far, type, NULL_RTX, fndecl,
+ INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
nargs);
if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
{
enum machine_mode mode;
t = TYPE_ARG_TYPES (type);
mode = TYPE_MODE (TREE_VALUE (t));
- this_arg = targetm.calls.function_arg (&args_so_far, mode,
+ this_arg = targetm.calls.function_arg (args_so_far, mode,
TREE_VALUE (t), true);
if (this_arg && !REG_P (this_arg))
this_arg = NULL_RTX;
tree argtype = TREE_VALUE (t);
enum machine_mode mode = TYPE_MODE (argtype);
rtx reg;
- if (pass_by_reference (&args_so_far, mode, argtype, true))
+ if (pass_by_reference (&args_so_far_v, mode, argtype, true))
{
argtype = build_pointer_type (argtype);
mode = TYPE_MODE (argtype);
}
- reg = targetm.calls.function_arg (&args_so_far, mode,
+ reg = targetm.calls.function_arg (args_so_far, mode,
argtype, true);
if (TREE_CODE (argtype) == REFERENCE_TYPE
&& INTEGRAL_TYPE_P (TREE_TYPE (argtype))
}
}
}
- targetm.calls.function_arg_advance (&args_so_far, mode,
+ targetm.calls.function_arg_advance (args_so_far, mode,
argtype, true);
t = TREE_CHAIN (t);
}