+2019-09-10  Richard Sandiford  <richard.sandiford@arm.com>
+
+ * hard-reg-set.h (target_hard_regs::x_call_used_reg_set): Delete.
+ (call_used_reg_set): Delete.
+ (call_used_or_fixed_regs): New macro.
+ * reginfo.c (init_reg_sets_1, globalize_reg): Remove initialization
+ of call_used_reg_set.
+ * caller-save.c (setup_save_areas): Use call_used_or_fixed_regs
+	instead of call_used_reg_set.
+ (save_call_clobbered_regs): Likewise.
+ * cfgcleanup.c (old_insns_match_p): Likewise.
+ * config/c6x/c6x.c (c6x_call_saved_register_used): Likewise.
+ * config/epiphany/epiphany.c (epiphany_conditional_register_usage):
+ Likewise.
+ * config/frv/frv.c (frv_ifcvt_modify_tests): Likewise.
+ * config/sh/sh.c (output_stack_adjust): Likewise.
+ * final.c (collect_fn_hard_reg_usage): Likewise.
+ * ira-build.c (ira_build): Likewise.
+ * ira-color.c (calculate_saved_nregs): Likewise.
+ (allocno_reload_assign, calculate_spill_cost): Likewise.
+ * ira-conflicts.c (ira_build_conflicts): Likewise.
+ * ira-costs.c (ira_tune_allocno_costs): Likewise.
+ * ira-lives.c (process_bb_node_lives): Likewise.
+ * ira.c (setup_reg_renumber): Likewise.
+ * lra-assigns.c (find_hard_regno_for_1, lra_assign): Likewise.
+ * lra-constraints.c (need_for_call_save_p): Likewise.
+ (need_for_split_p, inherit_in_ebb): Likewise.
+ * lra-lives.c (process_bb_lives): Likewise.
+ * lra-remat.c (call_used_input_regno_present_p): Likewise.
+ * postreload.c (reload_combine): Likewise.
+ * regrename.c (find_rename_reg): Likewise.
+ * reload1.c (reload_as_needed): Likewise.
+ * rtlanal.c (find_all_hard_reg_sets): Likewise.
+ * sel-sched.c (mark_unavailable_hard_regs): Likewise.
+ * shrink-wrap.c (requires_stack_frame_p): Likewise.
+
2019-09-10  Richard Sandiford  <richard.sandiford@arm.com>
* hard-reg-set.h (target_hard_regs::x_no_caller_save_reg_set): Delete.
freq = REG_FREQ_FROM_BB (BLOCK_FOR_INSN (insn));
REG_SET_TO_HARD_REG_SET (hard_regs_to_save,
&chain->live_throughout);
- get_call_reg_set_usage (insn, &used_regs, call_used_reg_set);
+ get_call_reg_set_usage (insn, &used_regs, call_used_or_fixed_regs);
/* Record all registers set in this call insn. These don't
need to be saved. N.B. the call insn might set a subreg
REG_SET_TO_HARD_REG_SET (hard_regs_to_save,
&chain->live_throughout);
- get_call_reg_set_usage (insn, &used_regs, call_used_reg_set);
+ get_call_reg_set_usage (insn, &used_regs, call_used_or_fixed_regs);
/* Record all registers set in this call insn. These don't
need to be saved. N.B. the call insn might set a subreg
| hard_regs_saved);
hard_regs_to_save &= savable_regs;
get_call_reg_set_usage (insn, &call_def_reg_set,
- call_used_reg_set);
+ call_used_or_fixed_regs);
hard_regs_to_save &= call_def_reg_set;
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
if (cheap
&& HARD_REGISTER_P (cheap)
- && TEST_HARD_REG_BIT (call_used_reg_set, REGNO (cheap)))
+ && TEST_HARD_REG_BIT (call_used_or_fixed_regs,
+ REGNO (cheap)))
{
rtx dest, newpat;
rtx pat = PATTERN (insn);
HARD_REG_SET i1_used, i2_used;
- get_call_reg_set_usage (i1, &i1_used, call_used_reg_set);
- get_call_reg_set_usage (i2, &i2_used, call_used_reg_set);
+ get_call_reg_set_usage (i1, &i1_used, call_used_or_fixed_regs);
+ get_call_reg_set_usage (i2, &i2_used, call_used_or_fixed_regs);
if (i1_used != i2_used)
return dir_none;
INIT_CUMULATIVE_ARGS (cum_v, NULL, NULL, 0, 0);
cum = pack_cumulative_args (&cum_v);
- call_saved_regset = ~call_used_reg_set;
+ call_saved_regset = ~call_used_or_fixed_regs;
for (i = 0; i < call_expr_nargs (call_expr); i++)
{
parameter = CALL_EXPR_ARG (call_expr, i);
CLEAR_HARD_REG_SET (reg_class_contents[SHORT_INSN_REGS]);
reg_class_contents[SIBCALL_REGS] = reg_class_contents[GENERAL_REGS];
/* It would be simpler and quicker if we could just use
- &~, alas, call_used_reg_set is yet uninitialized;
+ &~, alas, call_used_or_fixed_regs is not yet initialized;
it is set up later by our caller. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (!call_used_regs[i])
not fixed. However, allow the ICC/ICR temporary registers to be allocated
if we did not need to use them in reloading other registers. */
memset (&tmp_reg->regs, 0, sizeof (tmp_reg->regs));
- tmp_reg->regs = call_used_reg_set &~ fixed_reg_set;
+ tmp_reg->regs = call_used_or_fixed_regs &~ fixed_reg_set;
SET_HARD_REG_BIT (tmp_reg->regs, ICC_TEMP);
SET_HARD_REG_BIT (tmp_reg->regs, ICR_TEMP);
temp = -1;
if (temp < 0 && ! current_function_interrupt && epilogue_p >= 0)
{
- HARD_REG_SET temps = (call_used_reg_set
+ HARD_REG_SET temps = (call_used_or_fixed_regs
& ~fixed_reg_set
& savable_regs);
if (epilogue_p > 0)
&& !self_recursive_call_p (insn))
{
if (!get_call_reg_set_usage (insn, &insn_used_regs,
- call_used_reg_set))
+ call_used_or_fixed_regs))
return;
function_used_regs |= insn_used_regs;
/* The information we have gathered is only interesting if it exposes a
register from the call_used_regs that is not used in this function. */
- if (hard_reg_set_subset_p (call_used_reg_set, function_used_regs))
+ if (hard_reg_set_subset_p (call_used_or_fixed_regs, function_used_regs))
return;
node = cgraph_node::rtl_info (current_function_decl);
char x_call_really_used_regs[FIRST_PSEUDO_REGISTER];
- /* The same info as a HARD_REG_SET. */
- HARD_REG_SET x_call_used_reg_set;
-
/* For targets that use reload rather than LRA, this is the set
of registers that we are able to save and restore around calls
(i.e. those for which we know a suitable mode and set of
(this_target_hard_regs->x_call_used_regs)
#define call_really_used_regs \
(this_target_hard_regs->x_call_really_used_regs)
-#define call_used_reg_set \
- (this_target_hard_regs->x_call_used_reg_set)
#define savable_regs \
(this_target_hard_regs->x_savable_regs)
#define regs_invalidated_by_call \
(this_target_hard_regs->x_regs_invalidated_by_call)
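+/* The set of hard registers that are either clobbered by a call
+   (regs_invalidated_by_call) or fixed (fixed_reg_set).  Registers in
+   this set cannot be used to carry a value across a call.  */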
+#define call_used_or_fixed_regs \
+ (regs_invalidated_by_call | fixed_reg_set)
#define reg_alloc_order \
(this_target_hard_regs->x_reg_alloc_order)
#define inv_reg_alloc_order \
allocno crossing calls. */
FOR_EACH_ALLOCNO (a, ai)
if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0)
- ior_hard_reg_conflicts (a, call_used_reg_set);
+ ior_hard_reg_conflicts (a, call_used_or_fixed_regs);
}
if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
print_copies (ira_dump_file);
ira_assert (hard_regno >= 0);
for (i = hard_regno_nregs (hard_regno, mode) - 1; i >= 0; i--)
if (!allocated_hardreg_p[hard_regno + i]
- && !TEST_HARD_REG_BIT (call_used_reg_set, hard_regno + i)
+ && !TEST_HARD_REG_BIT (call_used_or_fixed_regs, hard_regno + i)
&& !LOCAL_REGNO (hard_regno + i))
nregs++;
return nregs;
saved[i] = OBJECT_TOTAL_CONFLICT_HARD_REGS (obj);
OBJECT_TOTAL_CONFLICT_HARD_REGS (obj) |= forbidden_regs;
if (! flag_caller_saves && ALLOCNO_CALLS_CROSSED_NUM (a) != 0)
- OBJECT_TOTAL_CONFLICT_HARD_REGS (obj) |= call_used_reg_set;
+ OBJECT_TOTAL_CONFLICT_HARD_REGS (obj) |= call_used_or_fixed_regs;
}
ALLOCNO_ASSIGNED_P (a) = false;
aclass = ALLOCNO_CLASS (a);
[aclass][hard_regno]]));
if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0
&& ira_hard_reg_set_intersection_p (hard_regno, ALLOCNO_MODE (a),
- call_used_reg_set))
+ call_used_or_fixed_regs))
{
ira_assert (flag_caller_saves);
caller_save_needed = 1;
cost += ALLOCNO_MEMORY_COST (a) - ALLOCNO_CLASS_COST (a);
nregs = hard_regno_nregs (hard_regno, ALLOCNO_MODE (a));
for (j = 0; j < nregs; j++)
- if (! TEST_HARD_REG_BIT (call_used_reg_set, hard_regno + j))
+ if (! TEST_HARD_REG_BIT (call_used_or_fixed_regs, hard_regno + j))
break;
if (j == nregs)
count++;
else
temp_hard_reg_set = (reg_class_contents[base]
& ~ira_no_alloc_regs
- & call_used_reg_set);
+ & call_used_or_fixed_regs);
FOR_EACH_ALLOCNO (a, ai)
{
int i, n = ALLOCNO_NUM_OBJECTS (a);
&& REG_USERVAR_P (allocno_reg)
&& ! reg_is_parm_p (allocno_reg)))
{
- OBJECT_TOTAL_CONFLICT_HARD_REGS (obj) |= call_used_reg_set;
- OBJECT_CONFLICT_HARD_REGS (obj) |= call_used_reg_set;
+ OBJECT_TOTAL_CONFLICT_HARD_REGS (obj) |= call_used_or_fixed_regs;
+ OBJECT_CONFLICT_HARD_REGS (obj) |= call_used_or_fixed_regs;
}
else if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0)
{
HARD_REG_SET no_caller_save_reg_set
- = (call_used_reg_set & ~savable_regs);
+ = (call_used_or_fixed_regs & ~savable_regs);
OBJECT_TOTAL_CONFLICT_HARD_REGS (obj) |= no_caller_save_reg_set;
OBJECT_TOTAL_CONFLICT_HARD_REGS (obj) |= temp_hard_reg_set;
OBJECT_CONFLICT_HARD_REGS (obj) |= no_caller_save_reg_set;
/* Allocnos bigger than the saved part of call saved
regs must conflict with them. */
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
- if (!TEST_HARD_REG_BIT (call_used_reg_set, regno)
+ if (!TEST_HARD_REG_BIT (call_used_or_fixed_regs, regno)
&& targetm.hard_regno_call_part_clobbered (NULL, regno,
obj_mode))
{
if (ira_hard_reg_set_intersection_p (regno, mode,
*crossed_calls_clobber_regs)
&& (ira_hard_reg_set_intersection_p (regno, mode,
- call_used_reg_set)
+ call_used_or_fixed_regs)
|| targetm.hard_regno_call_part_clobbered (NULL, regno,
mode)))
cost += (ALLOCNO_CALL_FREQ (a)
HARD_REG_SET this_call_used_reg_set;
get_call_reg_set_usage (insn, &this_call_used_reg_set,
- call_used_reg_set);
+ call_used_or_fixed_regs);
/* Don't allocate allocnos that cross setjmps or any
call, if this function receives a nonlocal
}
if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0
&& ira_hard_reg_set_intersection_p (hard_regno, ALLOCNO_MODE (a),
- call_used_reg_set))
+ call_used_or_fixed_regs))
{
ira_assert (!optimize || flag_caller_saves
|| (ALLOCNO_CALLS_CROSSED_NUM (a)
for (j = 0;
j < hard_regno_nregs (hard_regno, PSEUDO_REGNO_MODE (regno));
j++)
- if (! TEST_HARD_REG_BIT (call_used_reg_set, hard_regno + j)
+ if (! TEST_HARD_REG_BIT (call_used_or_fixed_regs, hard_regno + j)
&& ! df_regs_ever_live_p (hard_regno + j))
/* It needs save restore. */
hard_regno_costs[hard_regno]
for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
if (lra_reg_info[i].nrefs != 0 && reg_renumber[i] >= 0
&& lra_reg_info[i].call_insn
- && overlaps_hard_reg_set_p (call_used_reg_set,
+ && overlaps_hard_reg_set_p (call_used_or_fixed_regs,
PSEUDO_REGNO_MODE (i), reg_renumber[i]))
gcc_unreachable ();
/* Setup insns to process on the next constraint pass. */
((flag_ipa_ra &&
! hard_reg_set_empty_p (lra_reg_info[regno].actual_call_used_reg_set))
? lra_reg_info[regno].actual_call_used_reg_set
- : call_used_reg_set,
+ : call_used_or_fixed_regs,
PSEUDO_REGNO_MODE (regno), reg_renumber[regno])
|| (targetm.hard_regno_call_part_clobbered
(lra_reg_info[regno].call_insn,
true) the assign pass assumes that all pseudos living
through calls are assigned to call saved hard regs. */
&& (regno >= FIRST_PSEUDO_REGISTER
- || ! TEST_HARD_REG_BIT (call_used_reg_set, regno)
+ || ! TEST_HARD_REG_BIT (call_used_or_fixed_regs, regno)
|| usage_insns[regno].calls_num == calls_num)
/* We need at least 2 reloads to make pseudo splitting
profitable. We should provide hard regno splitting in
/* If there are pending saves/restores, the
optimization is not worth. */
&& usage_insns[regno].calls_num == calls_num - 1
- && TEST_HARD_REG_BIT (call_used_reg_set, hard_regno))
+ && TEST_HARD_REG_BIT (call_used_or_fixed_regs, hard_regno))
{
/* Restore the pseudo from the call result as
REG_RETURNED note says that the pseudo value is
{
call_insn = curr_insn;
if (! flag_ipa_ra && ! targetm.return_call_with_max_clobbers)
- last_call_used_reg_set = call_used_reg_set;
+ last_call_used_reg_set = call_used_or_fixed_regs;
else
{
HARD_REG_SET this_call_used_reg_set;
get_call_reg_set_usage (curr_insn, &this_call_used_reg_set,
- call_used_reg_set);
+ call_used_or_fixed_regs);
bool flush = (! hard_reg_set_empty_p (last_call_used_reg_set)
&& (last_call_used_reg_set
/* Number of candidates for rematerialization. */
static unsigned int cands_num;
-/* The following is used for representation of call_used_reg_set in
+/* The following is used for representation of call_used_or_fixed_regs in
form array whose elements are hard register numbers with nonzero bit
- in CALL_USED_REG_SET. */
+ in CALL_USED_OR_FIXED_REGS. */
static int call_used_regs_arr_len;
static int call_used_regs_arr[FIRST_PSEUDO_REGISTER];
reg != NULL;
reg = reg->next)
if (reg->type == OP_IN && reg->regno < FIRST_PSEUDO_REGISTER
- && TEST_HARD_REG_BIT (call_used_reg_set, reg->regno))
+ && TEST_HARD_REG_BIT (call_used_or_fixed_regs, reg->regno))
return true;
return false;
}
rtx link;
HARD_REG_SET used_regs;
- get_call_reg_set_usage (insn, &used_regs, call_used_reg_set);
+ get_call_reg_set_usage (insn, &used_regs, call_used_or_fixed_regs);
for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
if (TEST_HARD_REG_BIT (used_regs, r))
/* Initialize "constant" tables. */
CLEAR_HARD_REG_SET (fixed_reg_set);
- CLEAR_HARD_REG_SET (call_used_reg_set);
CLEAR_HARD_REG_SET (regs_invalidated_by_call);
operand_reg_set &= accessible_reg_set;
if (fixed_regs[i])
SET_HARD_REG_BIT (fixed_reg_set, i);
- if (call_used_regs[i])
- SET_HARD_REG_BIT (call_used_reg_set, i);
-
/* There are a couple of fixed registers that we know are safe to
exclude from being clobbered by calls:
{
fixed_regs[i] = call_used_regs[i] = 1;
SET_HARD_REG_BIT (fixed_reg_set, i);
- SET_HARD_REG_BIT (call_used_reg_set, i);
}
}
#endif
SET_HARD_REG_BIT (fixed_reg_set, i);
- SET_HARD_REG_BIT (call_used_reg_set, i);
reinit_regs ();
}
If the chain needs a call-saved register, mark the call-used
registers as unavailable. */
if (this_head->need_caller_save_reg)
- *unavailable |= call_used_reg_set;
+ *unavailable |= call_used_or_fixed_regs;
/* Mark registers that overlap this chain's lifetime as unavailable. */
merge_overlapping_regs (unavailable, this_head);
be partially clobbered by the call. */
else if (CALL_P (insn))
{
- reg_reloaded_valid &= ~(call_used_reg_set
+ reg_reloaded_valid &= ~(call_used_or_fixed_regs
| reg_reloaded_call_part_clobbered);
/* If this is a call to a setjmp-type function, we must not
CLEAR_HARD_REG_SET (*pset);
note_stores (insn, record_hard_reg_sets, pset);
if (CALL_P (insn) && implicit)
- *pset |= call_used_reg_set;
+ *pset |= call_used_or_fixed_regs;
for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
if (REG_NOTE_KIND (link) == REG_INC)
record_hard_reg_sets (XEXP (link, 0), NULL, pset);
reg_rename_p->unavailable_hard_regs |= sel_hrd.stack_regs;
#endif
- /* If there's a call on this path, make regs from call_used_reg_set
+ /* If there's a call on this path, make regs from call_used_or_fixed_regs
unavailable. */
if (def->crosses_call)
- reg_rename_p->unavailable_hard_regs |= call_used_reg_set;
+ reg_rename_p->unavailable_hard_regs |= call_used_or_fixed_regs;
/* Stop here before reload: we need FRAME_REGS, STACK_REGS, and crosses_call,
but not register classes. */
}
if (hard_reg_set_intersect_p (hardregs, prologue_used))
return true;
- hardregs &= ~call_used_reg_set;
+ hardregs &= ~call_used_or_fixed_regs;
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
if (TEST_HARD_REG_BIT (hardregs, regno)
&& df_regs_ever_live_p (regno))