+2019-09-30 Richard Sandiford <richard.sandiford@arm.com>
+
+ * target.def (return_call_with_max_clobbers): Delete.
+ * doc/tm.texi.in (TARGET_RETURN_CALL_WITH_MAX_CLOBBERS): Delete.
+ * doc/tm.texi: Regenerate.
+ * config/aarch64/aarch64.c (aarch64_return_call_with_max_clobbers)
+ (TARGET_RETURN_CALL_WITH_MAX_CLOBBERS): Delete.
+ * lra-int.h (lra_reg::actual_call_used_reg_set): Delete.
+ (lra_reg::call_insn): Delete.
+ * lra.c: Include function-abi.h.
+ (initialize_lra_reg_info_element): Don't initialize the fields above.
+ (lra): Use crtl->abi to test whether the current function needs to
+ save a register in the prologue. Remove special pre-inheritance
+ lra_create_live_ranges pass for flag_ipa_ra.
+	* lra-assigns.c: Include function-abi.h.
+ (find_hard_regno_for_1): Use crtl->abi to test whether the current
+ function needs to save a register in the prologue.
+ (lra_assign): Assert that registers aren't allocated to a
+ conflicting register, rather than checking only for overlaps
+ with call_used_or_fixed_regs. Do this even for flag_ipa_ra,
+ and for registers that are not live across a call.
+ * lra-constraints.c (last_call_for_abi): New variable.
+ (full_and_partial_call_clobbers): Likewise.
+ (setup_next_usage_insn): Remove the register from
+ full_and_partial_call_clobbers.
+ (need_for_call_save_p): Use call_clobbered_in_region_p to test
+ whether the register needs a caller save.
+ (need_for_split_p): Use full_and_partial_reg_clobbers instead
+ of call_used_or_fixed_regs.
+ (inherit_in_ebb): Initialize and maintain last_call_for_abi and
+ full_and_partial_call_clobbers.
+ * lra-lives.c (check_pseudos_live_through_calls): Replace
+ last_call_used_reg_set and call_insn arguments with an abi argument.
+ Remove handling of lra_reg::call_insn. Use function_abi::mode_clobbers
+ as the set of conflicting registers.
+ (calls_have_same_clobbers_p): Delete.
+ (process_bb_lives): Track the ABI of the last call instead of an
+ insn/HARD_REG_SET pair. Update calls to
+ check_pseudos_live_through_calls. Use eh_edge_abi to calculate
+ the set of registers that could be clobbered by an EH edge.
+ Include partially-clobbered as well as fully-clobbered registers.
+ (lra_create_live_ranges_1): Don't initialize lra_reg::call_insn.
+ * lra-remat.c: Include function-abi.h.
+ (call_used_regs_arr_len, call_used_regs_arr): Delete.
+ (set_bb_regs): Use insn_callee_abi to get the set of call-clobbered
+ registers and bitmap_view to combine them into dead_regs.
+ (call_used_input_regno_present_p): Take a function_abi argument
+ and use it to test whether a register is call-clobbered.
+ (calculate_gen_cands): Use insn_callee_abi to get the ABI of the
+	call insn target.  Update the call to call_used_input_regno_present_p.
+ (do_remat): Likewise.
+ (lra_remat): Remove the initialization of call_used_regs_arr_len
+ and call_used_regs_arr.
+
2019-09-30 Richard Sandiford <richard.sandiford@arm.com>
* loop-iv.c: Include regs.h and function-abi.h.
return false;
}
-/* Implement TARGET_RETURN_CALL_WITH_MAX_CLOBBERS. */
-
-rtx_insn *
-aarch64_return_call_with_max_clobbers (rtx_insn *call_1, rtx_insn *call_2)
-{
- gcc_assert (CALL_P (call_1) && CALL_P (call_2));
-
- if (!aarch64_simd_call_p (call_1) || aarch64_simd_call_p (call_2))
- return call_1;
- else
- return call_2;
-}
-
/* Implement REGMODE_NATURAL_SIZE. */
poly_uint64
aarch64_regmode_natural_size (machine_mode mode)
#undef TARGET_INSN_CALLEE_ABI
#define TARGET_INSN_CALLEE_ABI aarch64_insn_callee_abi
-#undef TARGET_RETURN_CALL_WITH_MAX_CLOBBERS
-#define TARGET_RETURN_CALL_WITH_MAX_CLOBBERS \
- aarch64_return_call_with_max_clobbers
-
#undef TARGET_CONSTANT_ALIGNMENT
#define TARGET_CONSTANT_ALIGNMENT aarch64_constant_alignment
for targets that don't have partly call-clobbered registers.
@end deftypefn
-@deftypefn {Target Hook} {rtx_insn *} TARGET_RETURN_CALL_WITH_MAX_CLOBBERS (rtx_insn *@var{call_1}, rtx_insn *@var{call_2})
-This hook returns a pointer to the call that partially clobbers the
-most registers. If a platform supports multiple ABIs where the registers
-that are partially clobbered may vary, this function compares two
-calls and returns a pointer to the one that clobbers the most registers.
-If both calls clobber the same registers, @var{call_1} must be returned.
-
-The registers clobbered in different ABIs must be a proper subset or
-superset of all other ABIs. @var{call_1} must always be a call insn,
-call_2 may be NULL or a call insn.
-@end deftypefn
-
@deftypefn {Target Hook} {const char *} TARGET_GET_MULTILIB_ABI_NAME (void)
This hook returns name of multilib ABI name.
@end deftypefn
@cindex call-saved register
@hook TARGET_HARD_REGNO_CALL_PART_CLOBBERED
-@hook TARGET_RETURN_CALL_WITH_MAX_CLOBBERS
-
@hook TARGET_GET_MULTILIB_ABI_NAME
@findex fixed_regs
#include "params.h"
#include "lra.h"
#include "lra-int.h"
+#include "function-abi.h"
/* Current iteration number of the pass and current iteration number
of the pass after the latest spill pass when any former reload
for (j = 0;
j < hard_regno_nregs (hard_regno, PSEUDO_REGNO_MODE (regno));
j++)
- if (! TEST_HARD_REG_BIT (call_used_or_fixed_regs, hard_regno + j)
+ if (! crtl->abi->clobbers_full_reg_p (hard_regno + j)
&& ! df_regs_ever_live_p (hard_regno + j))
/* It needs save restore. */
hard_regno_costs[hard_regno]
bitmap_initialize (&all_spilled_pseudos, ®_obstack);
create_live_range_start_chains ();
setup_live_pseudos_and_spill_after_risky_transforms (&all_spilled_pseudos);
- if (! lra_asm_error_p && flag_checking && !flag_ipa_ra)
+ if (! lra_asm_error_p && flag_checking)
/* Check correctness of allocation for call-crossed pseudos but
only when there are no asm errors as in the case of errors the
asm is removed and it can result in incorrect allocation. */
for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
- if (lra_reg_info[i].nrefs != 0 && reg_renumber[i] >= 0
- && lra_reg_info[i].call_insn
- && overlaps_hard_reg_set_p (call_used_or_fixed_regs,
+ if (lra_reg_info[i].nrefs != 0
+ && reg_renumber[i] >= 0
+ && overlaps_hard_reg_set_p (lra_reg_info[i].conflict_hard_regs,
PSEUDO_REGNO_MODE (i), reg_renumber[i]))
gcc_unreachable ();
/* Setup insns to process on the next constraint pass. */
/* Number of calls passed so far in current EBB. */
static int calls_num;
+/* Index ID is the CALLS_NUM associated with the last call we saw with
+   ABI identifier ID.  */
+static int last_call_for_abi[NUM_ABI_IDS];
+
+/* Which registers have been fully or partially clobbered by a call
+ since they were last used. */
+static HARD_REG_SET full_and_partial_call_clobbers;
+
/* Current reload pseudo check for validity of elements in
USAGE_INSNS. */
static int curr_usage_insns_check;
usage_insns[regno].reloads_num = reloads_num;
usage_insns[regno].calls_num = calls_num;
usage_insns[regno].after_p = after_p;
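+  /* A fresh use of REGNO means that no call has clobbered its hard
+     register since this point, so drop the register from the set of
+     call-clobbered registers tracked for inheritance.  */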
+ if (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] >= 0)
+ remove_from_hard_reg_set (&full_and_partial_call_clobbers,
+ PSEUDO_REGNO_MODE (regno),
+ reg_renumber[regno]);
}
/* The function is used to form list REGNO usages which consists of
need_for_call_save_p (int regno)
{
lra_assert (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] >= 0);
- return (usage_insns[regno].calls_num < calls_num
- && (overlaps_hard_reg_set_p
- ((flag_ipa_ra &&
- ! hard_reg_set_empty_p (lra_reg_info[regno].actual_call_used_reg_set))
- ? lra_reg_info[regno].actual_call_used_reg_set
- : call_used_or_fixed_regs,
- PSEUDO_REGNO_MODE (regno), reg_renumber[regno])
- || (targetm.hard_regno_call_part_clobbered
- (lra_reg_info[regno].call_insn
- ? insn_callee_abi (lra_reg_info[regno].call_insn).id () : 0,
- reg_renumber[regno], PSEUDO_REGNO_MODE (regno)))));
+ if (usage_insns[regno].calls_num < calls_num)
+ {
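+      /* Work out which ABIs have been used by calls since REGNO was
+	 last used; only clobbers from those calls matter here.  */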
+ unsigned int abis = 0;
+ for (unsigned int i = 0; i < NUM_ABI_IDS; ++i)
+ if (last_call_for_abi[i] > usage_insns[regno].calls_num)
+ abis |= 1 << i;
+ gcc_assert (abis);
+ if (call_clobbered_in_region_p (abis, full_and_partial_call_clobbers,
+ PSEUDO_REGNO_MODE (regno),
+ reg_renumber[regno]))
+ return true;
+ }
+ return false;
}
/* Global registers occurring in the current EBB. */
true) the assign pass assumes that all pseudos living
through calls are assigned to call saved hard regs. */
&& (regno >= FIRST_PSEUDO_REGISTER
- || ! TEST_HARD_REG_BIT (call_used_or_fixed_regs, regno)
- || usage_insns[regno].calls_num == calls_num)
+ || !TEST_HARD_REG_BIT (full_and_partial_call_clobbers, regno))
/* We need at least 2 reloads to make pseudo splitting
profitable. We should provide hard regno splitting in
any case to solve 1st insn scheduling problem when
curr_usage_insns_check++;
clear_invariants ();
reloads_num = calls_num = 0;
+ for (unsigned int i = 0; i < NUM_ABI_IDS; ++i)
+ last_call_for_abi[i] = 0;
+ CLEAR_HARD_REG_SET (full_and_partial_call_clobbers);
bitmap_clear (&check_only_regs);
bitmap_clear (&invalid_invariant_regs);
last_processed_bb = NULL;
int regno, hard_regno;
calls_num++;
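+	      /* Record the ABI of this call and accumulate the registers
+		 it can fully or partially clobber, so that later split
+		 and caller-save decisions know which registers calls in
+		 this EBB may clobber.  */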
+ function_abi callee_abi = insn_callee_abi (curr_insn);
+ last_call_for_abi[callee_abi.id ()] = calls_num;
+ full_and_partial_call_clobbers
+ |= callee_abi.full_and_partial_reg_clobbers ();
if ((cheap = find_reg_note (curr_insn,
REG_RETURNED, NULL_RTX)) != NULL_RTX
&& ((cheap = XEXP (cheap, 0)), true)
/* If there are pending saves/restores, the
optimization is not worth. */
&& usage_insns[regno].calls_num == calls_num - 1
- && TEST_HARD_REG_BIT (call_used_or_fixed_regs, hard_regno))
+ && callee_abi.clobbers_reg_p (GET_MODE (cheap), hard_regno))
{
/* Restore the pseudo from the call result as
REG_RETURNED note says that the pseudo value is
/* We don't need to save/restore of the pseudo from
this call. */
usage_insns[regno].calls_num = calls_num;
+ remove_from_hard_reg_set
+ (&full_and_partial_call_clobbers,
+ GET_MODE (cheap), hard_regno);
bitmap_set_bit (&check_only_regs, regno);
}
}
/* The following fields are defined only for pseudos. */
/* Hard registers with which the pseudo conflicts. */
HARD_REG_SET conflict_hard_regs;
- /* Call used registers with which the pseudo conflicts, taking into account
- the registers used by functions called from calls which cross the
- pseudo. */
- HARD_REG_SET actual_call_used_reg_set;
/* We assign hard registers to reload pseudos which can occur in few
places. So two hard register preferences are enough for them.
The following fields define the preferred hard registers. If
int val;
  /* Offset from relative eliminate register to pseudo reg.  */
poly_int64 offset;
- /* Call instruction, if any, that may affect this psuedo reg. */
- rtx_insn *call_insn;
/* These members are set up in lra-lives.c and updated in
lra-coalesce.c. */
/* The biggest size mode in which each pseudo reg is referred in
}
}
-/* Check that REGNO living through calls and setjumps, set up conflict
- regs using LAST_CALL_USED_REG_SET, and clear corresponding bits in
- PSEUDOS_LIVE_THROUGH_CALLS and PSEUDOS_LIVE_THROUGH_SETJUMPS.
- CALL_INSN is a call that is representative of all calls in the region
- described by the PSEUDOS_LIVE_THROUGH_* sets, in terms of the registers
- that it preserves and clobbers. */
+/* Check whether REGNO lives through calls and setjmps and clear
+ the corresponding bits in PSEUDOS_LIVE_THROUGH_CALLS and
+ PSEUDOS_LIVE_THROUGH_SETJUMPS. All calls in the region described
+ by PSEUDOS_LIVE_THROUGH_CALLS have the given ABI. */
static inline void
-check_pseudos_live_through_calls (int regno,
- HARD_REG_SET last_call_used_reg_set,
- rtx_insn *call_insn)
+check_pseudos_live_through_calls (int regno, const function_abi &abi)
{
- int hr;
- rtx_insn *old_call_insn;
-
if (! sparseset_bit_p (pseudos_live_through_calls, regno))
return;
- function_abi callee_abi = insn_callee_abi (call_insn);
- old_call_insn = lra_reg_info[regno].call_insn;
- if (!old_call_insn
- || (targetm.return_call_with_max_clobbers
- && targetm.return_call_with_max_clobbers (old_call_insn, call_insn)
- == call_insn))
- lra_reg_info[regno].call_insn = call_insn;
+ machine_mode mode = PSEUDO_REGNO_MODE (regno);
sparseset_clear_bit (pseudos_live_through_calls, regno);
- lra_reg_info[regno].conflict_hard_regs |= last_call_used_reg_set;
-
- for (hr = 0; HARD_REGISTER_NUM_P (hr); hr++)
- if (targetm.hard_regno_call_part_clobbered (callee_abi.id (), hr,
- PSEUDO_REGNO_MODE (regno)))
- add_to_hard_reg_set (&lra_reg_info[regno].conflict_hard_regs,
- PSEUDO_REGNO_MODE (regno), hr);
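+  /* mode_clobbers (MODE) contains the registers that cannot hold a
+     MODE value across a call with this ABI, i.e. the fully-clobbered
+     registers plus those partially clobbered in MODE.  */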
+ lra_reg_info[regno].conflict_hard_regs |= abi.mode_clobbers (mode);
if (! sparseset_bit_p (pseudos_live_through_setjumps, regno))
return;
sparseset_clear_bit (pseudos_live_through_setjumps, regno);
&& TEST_BIT (reg->early_clobber_alts, n_alt)));
}
-/* Return true if call instructions CALL1 and CALL2 use ABIs that
- preserve the same set of registers. */
-
-static bool
-calls_have_same_clobbers_p (rtx_insn *call1, rtx_insn *call2)
-{
- if (!targetm.return_call_with_max_clobbers)
- return false;
-
- return (targetm.return_call_with_max_clobbers (call1, call2) == call1
- && targetm.return_call_with_max_clobbers (call2, call1) == call2);
-}
-
/* Process insns of the basic block BB to update pseudo live ranges,
pseudo hard register conflicts, and insn notes. We do it on
backward scan of BB insns. CURR_POINT is the program point where
rtx_insn *next;
rtx link, *link_loc;
bool need_curr_point_incr;
- HARD_REG_SET last_call_used_reg_set;
- rtx_insn *call_insn = NULL;
- rtx_insn *last_call_insn = NULL;
+ /* Only has a meaningful value once we've seen a call. */
+ function_abi last_call_abi = default_function_abi;
reg_live_out = df_get_live_out (bb);
sparseset_clear (pseudos_live);
sparseset_clear (pseudos_live_through_calls);
sparseset_clear (pseudos_live_through_setjumps);
- CLEAR_HARD_REG_SET (last_call_used_reg_set);
REG_SET_TO_HARD_REG_SET (hard_regs_live, reg_live_out);
hard_regs_live &= ~eliminable_regset;
EXECUTE_IF_SET_IN_BITMAP (reg_live_out, FIRST_PSEUDO_REGISTER, j, bi)
{
update_pseudo_point (reg->regno, curr_point, USE_POINT);
mark_regno_live (reg->regno, reg->biggest_mode);
- check_pseudos_live_through_calls (reg->regno,
- last_call_used_reg_set,
- call_insn);
+ /* ??? Should be a no-op for unused registers. */
+ check_pseudos_live_through_calls (reg->regno, last_call_abi);
}
if (!HARD_REGISTER_NUM_P (reg->regno))
if (call_p)
{
- call_insn = curr_insn;
- if (! flag_ipa_ra && ! targetm.return_call_with_max_clobbers)
- last_call_used_reg_set = call_used_or_fixed_regs;
- else
- {
- HARD_REG_SET this_call_used_reg_set
- = insn_callee_abi (curr_insn).full_reg_clobbers ();
- /* ??? This preserves traditional behavior; it might not
- be needed. */
- this_call_used_reg_set |= fixed_reg_set;
-
- bool flush = (! hard_reg_set_empty_p (last_call_used_reg_set)
- && (last_call_used_reg_set
- != this_call_used_reg_set))
- || (last_call_insn && ! calls_have_same_clobbers_p
- (call_insn,
- last_call_insn));
-
- EXECUTE_IF_SET_IN_SPARSESET (pseudos_live, j)
- {
- lra_reg_info[j].actual_call_used_reg_set
- |= this_call_used_reg_set;
+ function_abi call_abi = insn_callee_abi (curr_insn);
- if (flush)
- check_pseudos_live_through_calls (j,
- last_call_used_reg_set,
- last_call_insn);
- }
- last_call_used_reg_set = this_call_used_reg_set;
- last_call_insn = call_insn;
- }
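+	      /* If this call uses a different ABI from the previous one,
+		 record the previous ABI's clobbers as conflicts for the
+		 pseudos that were live across it before switching.  */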
+ if (last_call_abi != call_abi)
+ EXECUTE_IF_SET_IN_SPARSESET (pseudos_live, j)
+ check_pseudos_live_through_calls (j, last_call_abi);
+
+ last_call_abi = call_abi;
sparseset_ior (pseudos_live_through_calls,
pseudos_live_through_calls, pseudos_live);
if (reg->type == OP_IN)
update_pseudo_point (reg->regno, curr_point, USE_POINT);
mark_regno_live (reg->regno, reg->biggest_mode);
- check_pseudos_live_through_calls (reg->regno,
- last_call_used_reg_set,
- call_insn);
+ check_pseudos_live_through_calls (reg->regno, last_call_abi);
}
for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
}
/* Pseudos can't go in stack regs at the start of a basic block that
- is reached by an abnormal edge. Likewise for call clobbered regs,
- because caller-save, fixup_abnormal_edges and possibly the table
- driven EH machinery are not quite ready to handle such pseudos
- live across such edges. */
+ is reached by an abnormal edge. Likewise for registers that are at
+ least partly call clobbered, because caller-save, fixup_abnormal_edges
+ and possibly the table driven EH machinery are not quite ready to
+ handle such pseudos live across such edges. */
if (bb_has_abnormal_pred (bb))
{
#ifdef STACK_REGS
if (!cfun->has_nonlocal_label
&& has_abnormal_call_or_eh_pred_edge_p (bb))
for (px = 0; HARD_REGISTER_NUM_P (px); px++)
- if (call_used_or_fixed_reg_p (px)
+ if (eh_edge_abi.clobbers_at_least_part_of_reg_p (px)
#ifdef REAL_PIC_OFFSET_TABLE_REGNUM
/* We should create a conflict of PIC pseudo with PIC
hard reg as PIC hard reg can have a wrong value after
if (sparseset_cardinality (pseudos_live_through_calls) == 0)
break;
if (sparseset_bit_p (pseudos_live_through_calls, j))
- check_pseudos_live_through_calls (j, last_call_used_reg_set, call_insn);
+ check_pseudos_live_through_calls (j, last_call_abi);
}
for (i = 0; HARD_REGISTER_NUM_P (i); ++i)
lra_reg_info[i].biggest_mode = GET_MODE (regno_reg_rtx[i]);
else
lra_reg_info[i].biggest_mode = VOIDmode;
- lra_reg_info[i].call_insn = NULL;
if (!HARD_REGISTER_NUM_P (i)
&& lra_reg_info[i].nrefs != 0)
{
#include "recog.h"
#include "lra.h"
#include "lra-int.h"
+#include "function-abi.h"
/* Number of candidates for rematerialization. */
static unsigned int cands_num;
-/* The following is used for representation of call_used_or_fixed_regs in
- form array whose elements are hard register numbers with nonzero bit
- in CALL_USED_OR_FIXED_REGS. */
-static int call_used_regs_arr_len;
-static int call_used_regs_arr[FIRST_PSEUDO_REGISTER];
-
/* Bitmap used for different calculations. */
static bitmap_head temp_bitmap;
bitmap_set_bit (&subreg_regs, regno);
}
if (CALL_P (insn))
- for (int i = 0; i < call_used_regs_arr_len; i++)
- bitmap_set_bit (&get_remat_bb_data (bb)->dead_regs,
- call_used_regs_arr[i]);
+ {
+ /* Partially-clobbered registers might still be live. */
+ HARD_REG_SET clobbers = insn_callee_abi (insn).full_reg_clobbers ();
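+	  /* A bitmap_view presents the HARD_REG_SET as a bitmap, so it
+	     can be IORed straight into dead_regs.  */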
+ bitmap_ior_into (&get_remat_bb_data (bb)->dead_regs,
+ bitmap_view<HARD_REG_SET> (clobbers));
+ }
}
/* Calculate changed_regs and dead_regs for each BB. */
/* Return true if a call used register is an input operand of INSN. */
static bool
-call_used_input_regno_present_p (rtx_insn *insn)
+call_used_input_regno_present_p (const function_abi &abi, rtx_insn *insn)
{
int iter;
lra_insn_recog_data_t id = lra_get_insn_recog_data (insn);
for (reg = (iter == 0 ? id->regs : static_id->hard_regs);
reg != NULL;
reg = reg->next)
- if (reg->type == OP_IN && reg->regno < FIRST_PSEUDO_REGISTER
- && TEST_HARD_REG_BIT (call_used_or_fixed_regs, reg->regno))
+ if (reg->type == OP_IN
+ && reg->regno < FIRST_PSEUDO_REGISTER
+ && abi.clobbers_reg_p (reg->biggest_mode, reg->regno))
return true;
return false;
}
}
if (CALL_P (insn))
- EXECUTE_IF_SET_IN_BITMAP (gen_insns, 0, uid, bi)
- {
- rtx_insn *insn2 = lra_insn_recog_data[uid]->insn;
+ {
+ function_abi callee_abi = insn_callee_abi (insn);
+ EXECUTE_IF_SET_IN_BITMAP (gen_insns, 0, uid, bi)
+ {
+ rtx_insn *insn2 = lra_insn_recog_data[uid]->insn;
- cand = insn_to_cand[INSN_UID (insn2)];
- gcc_assert (cand != NULL);
- if (call_used_input_regno_present_p (insn2))
- {
- bitmap_clear_bit (gen_cands, cand->index);
- bitmap_set_bit (&temp_bitmap, uid);
- }
- }
+ cand = insn_to_cand[INSN_UID (insn2)];
+ gcc_assert (cand != NULL);
+ if (call_used_input_regno_present_p (callee_abi, insn2))
+ {
+ bitmap_clear_bit (gen_cands, cand->index);
+ bitmap_set_bit (&temp_bitmap, uid);
+ }
+ }
+ }
bitmap_and_compl_into (gen_insns, &temp_bitmap);
cand = insn_to_cand[INSN_UID (insn)];
}
if (CALL_P (insn))
- EXECUTE_IF_SET_IN_BITMAP (avail_cands, 0, cid, bi)
- {
- cand = all_cands[cid];
+ {
+ function_abi callee_abi = insn_callee_abi (insn);
+ EXECUTE_IF_SET_IN_BITMAP (avail_cands, 0, cid, bi)
+ {
+ cand = all_cands[cid];
- if (call_used_input_regno_present_p (cand->insn))
- bitmap_set_bit (&temp_bitmap, cand->index);
- }
+ if (call_used_input_regno_present_p (callee_abi, cand->insn))
+ bitmap_set_bit (&temp_bitmap, cand->index);
+ }
+ }
bitmap_and_compl_into (avail_cands, &temp_bitmap);
insn_to_cand_activation = XCNEWVEC (cand_t, get_max_uid ());
regno_cands = XCNEWVEC (cand_t, max_regno);
all_cands.create (8000);
- call_used_regs_arr_len = 0;
- for (int i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- if (call_used_or_fixed_reg_p (i))
- call_used_regs_arr[call_used_regs_arr_len++] = i;
initiate_cand_table ();
create_remat_bb_data ();
bitmap_initialize (&temp_bitmap, ®_obstack);
#include "lra.h"
#include "lra-int.h"
#include "print-rtl.h"
+#include "function-abi.h"
/* Dump bitmap SET with TITLE and BB INDEX. */
void
lra_reg_info[i].no_stack_p = false;
#endif
CLEAR_HARD_REG_SET (lra_reg_info[i].conflict_hard_regs);
- CLEAR_HARD_REG_SET (lra_reg_info[i].actual_call_used_reg_set);
lra_reg_info[i].preferred_hard_regno1 = -1;
lra_reg_info[i].preferred_hard_regno2 = -1;
lra_reg_info[i].preferred_hard_regno_profit1 = 0;
lra_reg_info[i].val = get_new_reg_value ();
lra_reg_info[i].offset = 0;
lra_reg_info[i].copies = NULL;
- lra_reg_info[i].call_insn = NULL;
}
/* Initialize common reg info and copies. */
if (crtl->saves_all_registers)
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- if (!call_used_or_fixed_reg_p (i) && !fixed_regs[i] && !LOCAL_REGNO (i))
+ if (!crtl->abi->clobbers_full_reg_p (i)
+ && !fixed_regs[i]
+ && !LOCAL_REGNO (i))
df_set_regs_ever_live (i, true);
/* We don't DF from now and avoid its using because it is to
}
/* Do inheritance only for regular algorithms. */
if (! lra_simple_p)
- {
- if (flag_ipa_ra)
- {
- if (live_p)
- lra_clear_live_ranges ();
- /* As a side-effect of lra_create_live_ranges, we calculate
- actual_call_used_reg_set, which is needed during
- lra_inheritance. */
- lra_create_live_ranges (true, true);
- live_p = true;
- }
- lra_inheritance ();
- }
+ lra_inheritance ();
if (live_p)
lra_clear_live_ranges ();
bool fails_p;
bool, (unsigned int abi_id, unsigned int regno, machine_mode mode),
hook_bool_uint_uint_mode_false)
-DEFHOOK
-(return_call_with_max_clobbers,
- "This hook returns a pointer to the call that partially clobbers the\n\
-most registers. If a platform supports multiple ABIs where the registers\n\
-that are partially clobbered may vary, this function compares two\n\
-calls and returns a pointer to the one that clobbers the most registers.\n\
-If both calls clobber the same registers, @var{call_1} must be returned.\n\
-\n\
-The registers clobbered in different ABIs must be a proper subset or\n\
-superset of all other ABIs. @var{call_1} must always be a call insn,\n\
-call_2 may be NULL or a call insn.",
- rtx_insn *, (rtx_insn *call_1, rtx_insn *call_2),
- NULL)
-
DEFHOOK
(get_multilib_abi_name,
"This hook returns name of multilib ABI name.",