+2019-01-11  Steve Ellcey  <sellcey@marvell.com>
+
+ * config/aarch64/aarch64.c (aarch64_simd_call_p): New function.
+ (aarch64_hard_regno_call_part_clobbered): Add insn argument.
+ (aarch64_return_call_with_max_clobbers): New function.
+ (TARGET_RETURN_CALL_WITH_MAX_CLOBBERS): New macro.
+ * config/avr/avr.c (avr_hard_regno_call_part_clobbered): Add insn
+ argument.
+ * config/i386/i386.c (ix86_hard_regno_call_part_clobbered): Ditto.
+ * config/mips/mips.c (mips_hard_regno_call_part_clobbered): Ditto.
+ * config/rs6000/rs6000.c (rs6000_hard_regno_call_part_clobbered): Ditto.
+ * config/s390/s390.c (s390_hard_regno_call_part_clobbered): Ditto.
+ * cselib.c (cselib_process_insn): Add argument to
+ targetm.hard_regno_call_part_clobbered call.
+ * ira-conflicts.c (ira_build_conflicts): Ditto.
+ * ira-costs.c (ira_tune_allocno_costs): Ditto.
+ * lra-constraints.c (inherit_reload_reg): Ditto.
+ * lra-int.h (struct lra_reg): Add call_insn field, remove call_p field.
+ * lra-lives.c (check_pseudos_live_through_calls): Add call_insn
+ argument. Call targetm.return_call_with_max_clobbers.
+ Add argument to targetm.hard_regno_call_part_clobbered call.
+ (calls_have_same_clobbers_p): New function.
+ (process_bb_lives): Add call_insn and last_call_insn variables.
+ Pass call_insn to check_pseudos_live_through_calls.
+ Modify if stmt to check targetm.return_call_with_max_clobbers.
+ Update setting of flush variable.
+ (lra_create_live_ranges_1): Set call_insn to NULL instead of call_p
+ to false.
+ * lra.c (initialize_lra_reg_info_element): Set call_insn to NULL.
+ * regcprop.c (copyprop_hardreg_forward_1): Add argument to
+ targetm.hard_regno_call_part_clobbered call.
+ * reginfo.c (choose_hard_reg_mode): Ditto.
+ * regrename.c (check_new_reg_p): Ditto.
+ * reload.c (find_equiv_reg): Ditto.
+ * reload1.c (emit_reload_insns): Ditto.
+ * sched-deps.c (deps_analyze_insn): Ditto.
+ * sel-sched.c (init_regs_for_mode): Ditto.
+ (mark_unavailable_hard_regs): Ditto.
+ * targhooks.c (default_dwarf_frame_reg_mode): Ditto.
+ * target.def (hard_regno_call_part_clobbered): Add insn argument.
+ (return_call_with_max_clobbers): New target function.
+ * doc/tm.texi: Regenerate.
+ * doc/tm.texi.in (TARGET_RETURN_CALL_WITH_MAX_CLOBBERS): New hook.
+ * hooks.c (hook_bool_uint_mode_false): Change to
+ hook_bool_insn_uint_mode_false.
+ * hooks.h (hook_bool_uint_mode_false): Ditto.
+
clobbers the top 64 bits when restoring the bottom 64 bits. */
static bool
-aarch64_hard_regno_call_part_clobbered (unsigned int regno, machine_mode mode)
+aarch64_hard_regno_call_part_clobbered (rtx_insn *insn, unsigned int regno,
+ machine_mode mode)
{
- return FP_REGNUM_P (regno) && maybe_gt (GET_MODE_SIZE (mode), 8);
+ bool simd_p = insn && CALL_P (insn) && aarch64_simd_call_p (insn);
+ return FP_REGNUM_P (regno)
+ && maybe_gt (GET_MODE_SIZE (mode), simd_p ? 16 : 8);
+}
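For illustration (not part of the patch), here is how the updated hook behaves for V8, an FP/SIMD register whose low 64 bits are preserved by base-PCS calls and whose full 128 bits are preserved by vector-PCS calls; base_call and simd_call stand for hypothetical call insns using the respective ABIs:

  /* 8-byte value: the low half survives either kind of call.  */
  aarch64_hard_regno_call_part_clobbered (base_call, V8_REGNUM, DFmode)
    => false
  /* 16-byte value: a base-PCS call clobbers the high half.  */
  aarch64_hard_regno_call_part_clobbered (base_call, V8_REGNUM, TFmode)
    => true
  /* 16-byte value: a vector-PCS call preserves all 128 bits.  */
  aarch64_hard_regno_call_part_clobbered (simd_call, V8_REGNUM, TFmode)
    => false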
+
+/* Implement TARGET_RETURN_CALL_WITH_MAX_CLOBBERS. */
+
+rtx_insn *
+aarch64_return_call_with_max_clobbers (rtx_insn *call_1, rtx_insn *call_2)
+{
+ gcc_assert (CALL_P (call_1) && (!call_2 || CALL_P (call_2)));
+
+ /* A NULL CALL_2 never beats CALL_1; see the hook's documentation.  */
+ if (!call_2
+ || !aarch64_simd_call_p (call_1)
+ || aarch64_simd_call_p (call_2))
+ return call_1;
+ else
+ return call_2;
}
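On AArch64 the comparison is binary, since a base-PCS call clobbers a strict superset of what a vector-PCS call clobbers. A target with several ABIs whose clobber sets are totally ordered, as the hook's contract requires, could follow the same shape by ranking calls. A minimal sketch, where clobber_rank is an invented helper and not part of this patch:

  static rtx_insn *
  example_return_call_with_max_clobbers (rtx_insn *call_1, rtx_insn *call_2)
  {
    gcc_assert (CALL_P (call_1));
    /* Ties (and a missing second call) must resolve to CALL_1.  */
    if (!call_2 || clobber_rank (call_2) <= clobber_rank (call_1))
      return call_1;
    return call_2;
  }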
/* Implement REGMODE_NATURAL_SIZE. */
#define TARGET_REMOVE_EXTRA_CALL_PRESERVED_REGS \
aarch64_remove_extra_call_preserved_regs
+#undef TARGET_RETURN_CALL_WITH_MAX_CLOBBERS
+#define TARGET_RETURN_CALL_WITH_MAX_CLOBBERS \
+ aarch64_return_call_with_max_clobbers
+
#undef TARGET_CONSTANT_ALIGNMENT
#define TARGET_CONSTANT_ALIGNMENT aarch64_constant_alignment
/* Implement TARGET_HARD_REGNO_CALL_PART_CLOBBERED. */
static bool
-avr_hard_regno_call_part_clobbered (unsigned regno, machine_mode mode)
+avr_hard_regno_call_part_clobbered (rtx_insn *insn ATTRIBUTE_UNUSED,
+ unsigned regno, machine_mode mode)
{
/* FIXME: This hook gets called with MODE:REGNO combinations that don't
represent valid hard registers like, e.g. HI:29. Returning TRUE
the low 16 bytes are saved. */
static bool
-ix86_hard_regno_call_part_clobbered (unsigned int regno, machine_mode mode)
+ix86_hard_regno_call_part_clobbered (rtx_insn *insn ATTRIBUTE_UNUSED,
+ unsigned int regno, machine_mode mode)
{
return SSE_REGNO_P (regno) && GET_MODE_SIZE (mode) > 16;
}
registers with MODE > 64 bits are part clobbered too. */
static bool
-mips_hard_regno_call_part_clobbered (unsigned int regno, machine_mode mode)
+mips_hard_regno_call_part_clobbered (rtx_insn *insn ATTRIBUTE_UNUSED,
+ unsigned int regno, machine_mode mode)
{
if (TARGET_FLOATXX
&& hard_regno_nregs (regno, mode) == 1
/* Implement TARGET_HARD_REGNO_CALL_PART_CLOBBERED. */
static bool
-rs6000_hard_regno_call_part_clobbered (unsigned int regno, machine_mode mode)
+rs6000_hard_regno_call_part_clobbered (rtx_insn *insn ATTRIBUTE_UNUSED,
+ unsigned int regno, machine_mode mode)
{
if (TARGET_32BIT
&& TARGET_POWERPC64
bytes are saved across calls, however. */
static bool
-s390_hard_regno_call_part_clobbered (unsigned int regno, machine_mode mode)
+s390_hard_regno_call_part_clobbered (rtx_insn *insn ATTRIBUTE_UNUSED,
+ unsigned int regno, machine_mode mode)
{
if (!TARGET_64BIT
&& TARGET_ZARCH
if (call_used_regs[i]
|| (REG_VALUES (i) && REG_VALUES (i)->elt
&& (targetm.hard_regno_call_part_clobbered
- (i, GET_MODE (REG_VALUES (i)->elt->val_rtx)))))
+ (insn, i, GET_MODE (REG_VALUES (i)->elt->val_rtx)))))
cselib_invalidate_regno (i, reg_raw_mode[i]);
/* Since it is not clear how cselib is going to be used, be
@cindex call-used register
@cindex call-clobbered register
@cindex call-saved register
-@deftypefn {Target Hook} bool TARGET_HARD_REGNO_CALL_PART_CLOBBERED (unsigned int @var{regno}, machine_mode @var{mode})
+@deftypefn {Target Hook} bool TARGET_HARD_REGNO_CALL_PART_CLOBBERED (rtx_insn *@var{insn}, unsigned int @var{regno}, machine_mode @var{mode})
This hook should return true if @var{regno} is partly call-saved and
partly call-clobbered, and if a value of mode @var{mode} would be partly
-clobbered by a call. For example, if the low 32 bits of @var{regno} are
-preserved across a call but higher bits are clobbered, this hook should
-return true for a 64-bit mode but false for a 32-bit mode.
+clobbered by call instruction @var{insn}. If @var{insn} is NULL, the
+hook should return true if any call could partly clobber the register.
+For example, if the low 32 bits of @var{regno} are preserved across a call
+but higher bits are clobbered, this hook should return true for a 64-bit
+mode but false for a 32-bit mode.
The default implementation returns false, which is correct
for targets that don't have partly call-clobbered registers.
Defining the hook is purely an optimization.
@end deftypefn
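As a concrete illustration of the 32-bit example above, a hypothetical target whose calls preserve only the low 4 bytes of registers 16 through 31 might implement the hook as follows (a sketch, not code from this patch):

  static bool
  example_hard_regno_call_part_clobbered (rtx_insn *insn ATTRIBUTE_UNUSED,
                                          unsigned int regno, machine_mode mode)
  {
    /* Any mode wider than 4 bytes extends into the clobbered high bits.  */
    return IN_RANGE (regno, 16, 31) && maybe_gt (GET_MODE_SIZE (mode), 4);
  }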
+@deftypefn {Target Hook} {rtx_insn *} TARGET_RETURN_CALL_WITH_MAX_CLOBBERS (rtx_insn *@var{call_1}, rtx_insn *@var{call_2})
+This hook compares two call insns and returns a pointer to the one
+that, under its ABI, partially clobbers the most registers. It is only
+needed on platforms that support multiple ABIs, where the set of
+partially clobbered registers can differ from one call to another.
+If both calls clobber the same registers, @var{call_1} must be
+returned.
+
+The set of registers partially clobbered under any one ABI must be a
+subset or superset of the set partially clobbered under any other ABI.
+@var{call_1} must always be a call insn; @var{call_2} may be NULL or
+a call insn.
+@end deftypefn
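A caller can use the hook to fold all the calls in a region down to a single worst-case representative, which is how the lra-lives.c changes below use it. A minimal sketch of that pattern (fold_worst_call is illustrative, not part of the patch):

  static rtx_insn *
  fold_worst_call (rtx_insn *worst, rtx_insn *call)
  {
    if (!worst)
      return call;
    if (targetm.return_call_with_max_clobbers
        && targetm.return_call_with_max_clobbers (worst, call) == call)
      return call;
    return worst;
  }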
+
@findex fixed_regs
@findex call_used_regs
@findex global_regs
@hook TARGET_REMOVE_EXTRA_CALL_PRESERVED_REGS
+@hook TARGET_RETURN_CALL_WITH_MAX_CLOBBERS
+
@findex fixed_regs
@findex call_used_regs
@findex global_regs
-/* Generic hook that takes (unsigned int, machine_mode) and returns false. */
+/* Generic hook that takes (rtx_insn *, unsigned int, machine_mode) and
+ returns false.  */
bool
-hook_bool_uint_mode_false (unsigned int, machine_mode)
+hook_bool_insn_uint_mode_false (rtx_insn *, unsigned int, machine_mode)
{
return false;
}
extern bool hook_bool_mode_uhwi_false (machine_mode,
unsigned HOST_WIDE_INT);
extern bool hook_bool_puint64_puint64_true (poly_uint64, poly_uint64);
-extern bool hook_bool_uint_mode_false (unsigned int, machine_mode);
+extern bool hook_bool_insn_uint_mode_false (rtx_insn *, unsigned int,
+ machine_mode);
extern bool hook_bool_uint_mode_true (unsigned int, machine_mode);
extern bool hook_bool_tree_false (tree);
extern bool hook_bool_const_tree_false (const_tree);
regs must conflict with them. */
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
if (!TEST_HARD_REG_BIT (call_used_reg_set, regno)
- && targetm.hard_regno_call_part_clobbered (regno,
+ && targetm.hard_regno_call_part_clobbered (NULL, regno,
obj_mode))
{
SET_HARD_REG_BIT (OBJECT_CONFLICT_HARD_REGS (obj), regno);
*crossed_calls_clobber_regs)
&& (ira_hard_reg_set_intersection_p (regno, mode,
call_used_reg_set)
- || targetm.hard_regno_call_part_clobbered (regno,
+ || targetm.hard_regno_call_part_clobbered (NULL, regno,
mode)))
cost += (ALLOCNO_CALL_FREQ (a)
* (ira_memory_move_cost[mode][rclass][0]
asm is removed and it can result in incorrect allocation. */
for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
if (lra_reg_info[i].nrefs != 0 && reg_renumber[i] >= 0
- && lra_reg_info[i].call_p
+ && lra_reg_info[i].call_insn
&& overlaps_hard_reg_set_p (call_used_reg_set,
PSEUDO_REGNO_MODE (i), reg_renumber[i]))
gcc_unreachable ();
: call_used_reg_set,
PSEUDO_REGNO_MODE (regno), reg_renumber[regno])
|| (targetm.hard_regno_call_part_clobbered
- (reg_renumber[regno], PSEUDO_REGNO_MODE (regno)))));
+ (lra_reg_info[regno].call_insn,
+ reg_renumber[regno], PSEUDO_REGNO_MODE (regno)))));
}
/* Global registers occurring in the current EBB. */
/* True if the pseudo should not be assigned to a stack register. */
bool no_stack_p;
#endif
- /* True if the pseudo crosses a call. It is setup in lra-lives.c
- and used to check that the pseudo crossing a call did not get a
- call used hard register. */
- bool call_p;
/* Number of references and execution frequencies of the register in
*non-debug* insns. */
int nrefs, freq;
int val;
- /* Offset from relative eliminate register to pesudo reg.  */
+ /* Offset from relative eliminate register to pseudo reg.  */
poly_int64 offset;
+ /* Call instruction, if any, that may affect this pseudo reg.  */
+ rtx_insn *call_insn;
/* These members are set up in lra-lives.c and updated in
lra-coalesce.c. */
/* The biggest size mode in which each pseudo reg is referred in
/* Check that REGNO living through calls and setjumps, set up conflict
regs using LAST_CALL_USED_REG_SET, and clear corresponding bits in
- PSEUDOS_LIVE_THROUGH_CALLS and PSEUDOS_LIVE_THROUGH_SETJUMPS. */
+ PSEUDOS_LIVE_THROUGH_CALLS and PSEUDOS_LIVE_THROUGH_SETJUMPS.
+ CALL_INSN is a call that is representative of all calls in the region
+ described by the PSEUDOS_LIVE_THROUGH_* sets, in terms of the registers
+ that it preserves and clobbers. */
+
static inline void
check_pseudos_live_through_calls (int regno,
- HARD_REG_SET last_call_used_reg_set)
+ HARD_REG_SET last_call_used_reg_set,
+ rtx_insn *call_insn)
{
int hr;
+ rtx_insn *old_call_insn;
if (! sparseset_bit_p (pseudos_live_through_calls, regno))
return;
+
+ gcc_assert (call_insn && CALL_P (call_insn));
+ old_call_insn = lra_reg_info[regno].call_insn;
+ if (!old_call_insn
+ || (targetm.return_call_with_max_clobbers
+ && targetm.return_call_with_max_clobbers (old_call_insn, call_insn)
+ == call_insn))
+ lra_reg_info[regno].call_insn = call_insn;
+
sparseset_clear_bit (pseudos_live_through_calls, regno);
IOR_HARD_REG_SET (lra_reg_info[regno].conflict_hard_regs,
last_call_used_reg_set);
for (hr = 0; HARD_REGISTER_NUM_P (hr); hr++)
- if (targetm.hard_regno_call_part_clobbered (hr,
+ if (targetm.hard_regno_call_part_clobbered (call_insn, hr,
PSEUDO_REGNO_MODE (regno)))
add_to_hard_reg_set (&lra_reg_info[regno].conflict_hard_regs,
PSEUDO_REGNO_MODE (regno), hr);
- lra_reg_info[regno].call_p = true;
if (! sparseset_bit_p (pseudos_live_through_setjumps, regno))
return;
sparseset_clear_bit (pseudos_live_through_setjumps, regno);
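The assert on CALL_INSN holds because process_bb_lives scans each block backward: a pseudo only enters pseudos_live_through_calls when a call insn is processed, and processing that call is also what sets the call_insn that is later passed here.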
&& TEST_BIT (reg->early_clobber_alts, n_alt))));
}
+/* Return true if call instructions CALL1 and CALL2 use ABIs that
+ preserve the same set of registers. */
+
+static bool
+calls_have_same_clobbers_p (rtx_insn *call1, rtx_insn *call2)
+{
+ if (!targetm.return_call_with_max_clobbers)
+ return false;
+
+ return (targetm.return_call_with_max_clobbers (call1, call2) == call1
+ && targetm.return_call_with_max_clobbers (call2, call1) == call2);
+}
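Querying the hook in both argument orders works because ties must resolve to the first argument: if the two calls clobber identical register sets, each query returns its first argument and the function returns true; if one call clobbers strictly more, that call wins both queries and one of the two equalities fails.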
+
/* Process insns of the basic block BB to update pseudo live ranges,
pseudo hard register conflicts, and insn notes. We do it on
backward scan of BB insns. CURR_POINT is the program point where
rtx link, *link_loc;
bool need_curr_point_incr;
HARD_REG_SET last_call_used_reg_set;
+ rtx_insn *call_insn = NULL;
+ rtx_insn *last_call_insn = NULL;
reg_live_out = df_get_live_out (bb);
sparseset_clear (pseudos_live);
update_pseudo_point (reg->regno, curr_point, USE_POINT);
mark_regno_live (reg->regno, reg->biggest_mode);
check_pseudos_live_through_calls (reg->regno,
- last_call_used_reg_set);
+ last_call_used_reg_set,
+ call_insn);
}
if (!HARD_REGISTER_NUM_P (reg->regno))
if (call_p)
{
- if (! flag_ipa_ra)
+ call_insn = curr_insn;
+ if (! flag_ipa_ra && ! targetm.return_call_with_max_clobbers)
COPY_HARD_REG_SET(last_call_used_reg_set, call_used_reg_set);
else
{
call_used_reg_set);
bool flush = (! hard_reg_set_empty_p (last_call_used_reg_set)
- && ! hard_reg_set_equal_p (last_call_used_reg_set,
- this_call_used_reg_set));
+ && ! hard_reg_set_equal_p (last_call_used_reg_set,
+ this_call_used_reg_set))
+ || (last_call_insn
+ && ! calls_have_same_clobbers_p (call_insn, last_call_insn));
EXECUTE_IF_SET_IN_SPARSESET (pseudos_live, j)
{
IOR_HARD_REG_SET (lra_reg_info[j].actual_call_used_reg_set,
this_call_used_reg_set);
+
if (flush)
- check_pseudos_live_through_calls
- (j, last_call_used_reg_set);
+ check_pseudos_live_through_calls (j,
+ last_call_used_reg_set,
+ last_call_insn);
}
COPY_HARD_REG_SET(last_call_used_reg_set, this_call_used_reg_set);
+ last_call_insn = call_insn;
}
sparseset_ior (pseudos_live_through_calls,
update_pseudo_point (reg->regno, curr_point, USE_POINT);
mark_regno_live (reg->regno, reg->biggest_mode);
check_pseudos_live_through_calls (reg->regno,
- last_call_used_reg_set);
+ last_call_used_reg_set,
+ call_insn);
}
for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
if (sparseset_cardinality (pseudos_live_through_calls) == 0)
break;
if (sparseset_bit_p (pseudos_live_through_calls, j))
- check_pseudos_live_through_calls (j, last_call_used_reg_set);
+ check_pseudos_live_through_calls (j, last_call_used_reg_set, call_insn);
}
for (i = 0; HARD_REGISTER_NUM_P (i); ++i)
lra_reg_info[i].biggest_mode = GET_MODE (regno_reg_rtx[i]);
else
lra_reg_info[i].biggest_mode = VOIDmode;
- lra_reg_info[i].call_p = false;
+ lra_reg_info[i].call_insn = NULL;
if (!HARD_REGISTER_NUM_P (i)
&& lra_reg_info[i].nrefs != 0)
{
lra_reg_info[i].val = get_new_reg_value ();
lra_reg_info[i].offset = 0;
lra_reg_info[i].copies = NULL;
+ lra_reg_info[i].call_insn = NULL;
}
/* Initialize common reg info and copies. */
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
if ((TEST_HARD_REG_BIT (regs_invalidated_by_this_call, regno)
|| (targetm.hard_regno_call_part_clobbered
- (regno, vd->e[regno].mode)))
+ (insn, regno, vd->e[regno].mode)))
&& (regno < set_regno || regno >= set_regno + set_nregs))
kill_value_regno (regno, 1, vd);
if (hard_regno_nregs (regno, mode) == nregs
&& targetm.hard_regno_mode_ok (regno, mode)
&& (!call_saved
- || !targetm.hard_regno_call_part_clobbered (regno, mode))
+ || !targetm.hard_regno_call_part_clobbered (NULL, regno, mode))
&& maybe_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (found_mode)))
found_mode = mode;
if (hard_regno_nregs (regno, mode) == nregs
&& targetm.hard_regno_mode_ok (regno, mode)
&& (!call_saved
- || !targetm.hard_regno_call_part_clobbered (regno, mode))
+ || !targetm.hard_regno_call_part_clobbered (NULL, regno, mode))
&& maybe_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (found_mode)))
found_mode = mode;
if (hard_regno_nregs (regno, mode) == nregs
&& targetm.hard_regno_mode_ok (regno, mode)
&& (!call_saved
- || !targetm.hard_regno_call_part_clobbered (regno, mode))
+ || !targetm.hard_regno_call_part_clobbered (NULL, regno, mode))
&& maybe_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (found_mode)))
found_mode = mode;
if (hard_regno_nregs (regno, mode) == nregs
&& targetm.hard_regno_mode_ok (regno, mode)
&& (!call_saved
- || !targetm.hard_regno_call_part_clobbered (regno, mode))
+ || !targetm.hard_regno_call_part_clobbered (NULL, regno, mode))
&& maybe_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (found_mode)))
found_mode = mode;
if (hard_regno_nregs (regno, mode) == nregs
&& targetm.hard_regno_mode_ok (regno, mode)
&& (!call_saved
- || !targetm.hard_regno_call_part_clobbered (regno, mode)))
+ || !targetm.hard_regno_call_part_clobbered (NULL, regno, mode)))
return mode;
}
&& ! DEBUG_INSN_P (tmp->insn))
|| (this_head->need_caller_save_reg
&& ! (targetm.hard_regno_call_part_clobbered
- (reg, GET_MODE (*tmp->loc)))
+ (NULL, reg, GET_MODE (*tmp->loc)))
&& (targetm.hard_regno_call_part_clobbered
- (new_reg, GET_MODE (*tmp->loc)))))
+ (NULL, new_reg, GET_MODE (*tmp->loc)))))
return false;
return true;
if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
for (i = 0; i < nregs; ++i)
if (call_used_regs[regno + i]
- || targetm.hard_regno_call_part_clobbered (regno + i, mode))
+ || targetm.hard_regno_call_part_clobbered (NULL, regno + i,
+ mode))
return 0;
if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
for (i = 0; i < valuenregs; ++i)
if (call_used_regs[valueno + i]
- || targetm.hard_regno_call_part_clobbered (valueno + i,
+ || targetm.hard_regno_call_part_clobbered (NULL, valueno + i,
mode))
return 0;
}
: out_regno + k);
reg_reloaded_insn[regno + k] = insn;
SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
- if (targetm.hard_regno_call_part_clobbered (regno + k,
+ if (targetm.hard_regno_call_part_clobbered (NULL,
+ regno + k,
mode))
SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
regno + k);
: in_regno + k);
reg_reloaded_insn[regno + k] = insn;
SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
- if (targetm.hard_regno_call_part_clobbered (regno + k,
+ if (targetm.hard_regno_call_part_clobbered (NULL,
+ regno + k,
mode))
SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
regno + k);
CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
if (targetm.hard_regno_call_part_clobbered
- (src_regno + k, mode))
+ (NULL, src_regno + k, mode))
SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
src_regno + k);
else
Since we only have a choice between 'might be clobbered'
and 'definitely not clobbered', we must include all
partly call-clobbered registers here. */
- else if (targetm.hard_regno_call_part_clobbered (i,
+ else if (targetm.hard_regno_call_part_clobbered (insn, i,
reg_raw_mode[i])
|| TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
SET_REGNO_REG_SET (reg_pending_clobbers, i);
if (i >= 0)
continue;
- if (targetm.hard_regno_call_part_clobbered (cur_reg, mode))
+ if (targetm.hard_regno_call_part_clobbered (NULL, cur_reg, mode))
SET_HARD_REG_BIT (sel_hrd.regs_for_call_clobbered[mode],
cur_reg);
/* Exclude registers that are partially call clobbered. */
if (def->crosses_call
- && !targetm.hard_regno_call_part_clobbered (regno, mode))
+ && !targetm.hard_regno_call_part_clobbered (NULL, regno, mode))
AND_COMPL_HARD_REG_SET (reg_rename_p->available_for_renaming,
sel_hrd.regs_for_call_clobbered[mode]);
(hard_regno_call_part_clobbered,
"This hook should return true if @var{regno} is partly call-saved and\n\
partly call-clobbered, and if a value of mode @var{mode} would be partly\n\
-clobbered by a call. For example, if the low 32 bits of @var{regno} are\n\
-preserved across a call but higher bits are clobbered, this hook should\n\
-return true for a 64-bit mode but false for a 32-bit mode.\n\
+clobbered by call instruction @var{insn}. If @var{insn} is NULL, the\n\
+hook should return true if any call could partly clobber the register.\n\
+For example, if the low 32 bits of @var{regno} are preserved across a call\n\
+but higher bits are clobbered, this hook should return true for a 64-bit\n\
+mode but false for a 32-bit mode.\n\
\n\
The default implementation returns false, which is correct\n\
for targets that don't have partly call-clobbered registers.",
- bool, (unsigned int regno, machine_mode mode),
- hook_bool_uint_mode_false)
+ bool, (rtx_insn *insn, unsigned int regno, machine_mode mode),
+ hook_bool_insn_uint_mode_false)
+
+DEFHOOK
+(return_call_with_max_clobbers,
+ "This hook returns a pointer to the call that partially clobbers the\n\
+most registers. If a platform supports multiple ABIs where the registers\n\
+that are partially clobbered may vary, this function compares two\n\
+calls and returns a pointer to the one that clobbers the most registers.\n\
+If both calls clobber the same registers, @var{call_1} must be returned.\n\
+\n\
+The registers clobbered in different ABIs must be a proper subset or\n\
+superset of all other ABIs. @var{call_1} must always be a call insn,\n\
+call_2 may be NULL or a call insn.",
+ rtx_insn *, (rtx_insn *call_1, rtx_insn *call_2),
+ NULL)
DEFHOOK
(remove_extra_call_preserved_regs,
{
machine_mode save_mode = reg_raw_mode[regno];
- if (targetm.hard_regno_call_part_clobbered (regno, save_mode))
+ if (targetm.hard_regno_call_part_clobbered (NULL, regno, save_mode))
save_mode = choose_hard_reg_mode (regno, 1, true);
return save_mode;
}