+2019-09-09 Richard Sandiford <richard.sandiford@arm.com>
+
+ * rtl.h (CALL_INSN_FUNCTION_USAGE): Document what SETs mean.
+ (note_pattern_stores): Declare.
+ (note_stores): Take an rtx_insn *.
+ * rtlanal.c (set_of): Use note_pattern_stores instead of note_stores.
+ (find_all_hard_reg_sets): Pass the insn rather than its pattern to
+ note_stores. Remove explicit handling of CALL_INSN_FUNCTION_USAGE.
+ (note_stores): Take an rtx_insn * as argument and process
+ CALL_INSN_FUNCTION_USAGE. Rename old function to...
+ (note_pattern_stores): ...this.
+ (find_first_parameter_load): Pass the insn rather than
+ its pattern to note_stores.
+ * alias.c (memory_modified_in_insn_p, init_alias_analysis): Likewise.
+ * caller-save.c (setup_save_areas, save_call_clobbered_regs)
+ (insert_one_insn): Likewise.
+ * combine.c (combine_instructions): Likewise.
+ (likely_spilled_retval_p): Likewise.
+ (try_combine): Use note_pattern_stores instead of note_stores.
+ (record_dead_and_set_regs): Pass the insn rather than its pattern
+ to note_stores.
+ (reg_dead_at_p): Likewise.
+ * config/bfin/bfin.c (workaround_speculation): Likewise.
+ * config/c6x/c6x.c (maybe_clobber_cond): Likewise. Take an rtx_insn *
+ rather than an rtx.
+ * config/frv/frv.c (frv_registers_update): Use note_pattern_stores
+ instead of note_stores.
+ (frv_optimize_membar_local): Pass the insn rather than its pattern
+ to note_stores.
+ * config/gcn/gcn.c (gcn_md_reorg): Likewise.
+ * config/i386/i386.c (ix86_avx_u128_mode_after): Likewise.
+ * config/mips/mips.c (vr4130_true_reg_dependence_p): Likewise.
+ (r10k_needs_protection_p, mips_sim_issue_insn): Likewise.
+ (mips_reorg_process_insns): Likewise.
+ * config/s390/s390.c (s390_regs_ever_clobbered): Likewise.
+ * config/sh/sh.c (flow_dependent_p): Likewise. Take rtx_insn *s
+ rather than rtxes.
+ * cse.c (delete_trivially_dead_insns): Pass the insn rather than
+ its pattern to note_stores.
+ * cselib.c (cselib_record_sets): Use note_pattern_stores instead
+ of note_stores.
+ * dce.c (mark_nonreg_stores): Remove the "body" parameter and pass
+ the insn to note_stores.
+ (prescan_insns_for_dce): Update call accordingly.
+ * ddg.c (mem_write_insn_p): Pass the insn rather than its pattern
+ to note_stores.
+ * df-problems.c (can_move_insns_across): Likewise.
+ * dse.c (emit_inc_dec_insn_before, replace_read): Likewise.
+ * function.c (assign_parm_setup_reg): Likewise.
+ * gcse-common.c (record_last_mem_set_info_common): Likewise.
+ * gcse.c (load_killed_in_block_p, compute_hash_table_work): Likewise.
+ (single_set_gcse): Use note_pattern_stores instead of note_stores.
+ * ira.c (validate_equiv_mem): Likewise.
+ (update_equiv_regs): Use note_pattern_stores rather than note_stores
+ for no_equiv.
+ * loop-doloop.c (doloop_optimize): Pass the insn rather than its
+ pattern to note_stores.
+ * loop-invariant.c (calculate_loop_reg_pressure): Likewise.
+ * loop-iv.c (simplify_using_initial_values): Likewise.
+ * mode-switching.c (optimize_mode_switching): Likewise.
+ * optabs.c (emit_libcall_block_1): Likewise.
+ (expand_atomic_compare_and_swap): Likewise.
+ * postreload-gcse.c (load_killed_in_block_p): Likewise.
+ (record_opr_changes): Likewise. Remove explicit handling of
+ CALL_INSN_FUNCTION_USAGE.
+ * postreload.c (reload_combine, reload_cse_move2add): Likewise.
+ * regcprop.c (kill_clobbered_values): Likewise.
+ (copyprop_hardreg_forward_1): Pass the insn rather than its pattern
+ to note_stores.
+ * regrename.c (build_def_use): Likewise.
+ * reload1.c (reload): Use note_pattern_stores instead of note_stores
+ for mark_not_eliminable.
+ (reload_as_needed): Pass the insn rather than its pattern
+ to note_stores.
+ (emit_output_reload_insns): Likewise.
+ * resource.c (mark_target_live_regs): Likewise.
+ * sched-deps.c (init_insn_reg_pressure_info): Likewise.
+ * sched-rgn.c (sets_likely_spilled): Use note_pattern_stores
+ instead of note_stores.
+ * shrink-wrap.c (try_shrink_wrapping): Pass the insn rather than
+ its pattern to note_stores.
+ * stack-ptr-mod.c (pass_stack_ptr_mod::execute): Likewise.
+ * var-tracking.c (adjust_insn, add_with_sets): Likewise.
+
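For reference, this is roughly how a caller changes under the new interface.
A sketch only: the callback and variable names below are hypothetical, but
the callback signature matches the one declared in rtl.h further down.

    /* Hypothetical note_stores callback: record in *(bool *) DATA
       whether any hard register is stored.  */
    static void
    note_hard_reg_store (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
                         void *data)
    {
      if (REG_P (dest) && HARD_REGISTER_P (dest))
        *(bool *) data = true;
    }

    bool stores_hard_reg = false;
    /* Before: only the pattern was walked, so CLOBBERs recorded in
       CALL_INSN_FUNCTION_USAGE were invisible.  */
    note_pattern_stores (PATTERN (insn), note_hard_reg_store,
                         &stores_hard_reg);
    /* After: pass the insn itself; for calls, the usage CLOBBERs are
       visited as well.  */
    note_stores (insn, note_hard_reg_store, &stores_hard_reg);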
2019-09-09 Richard Sandiford <richard.sandiford@arm.com>

* hard-reg-set.h (HARD_REG_SET): Define using a typedef rather
if (CALL_P (insn))
return true;
memory_modified = false;
- note_stores (PATTERN (insn), memory_modified_1, CONST_CAST_RTX(mem));
+ note_stores (as_a<const rtx_insn *> (insn), memory_modified_1,
+ CONST_CAST_RTX(mem));
return memory_modified;
}
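The as_a cast is needed because memory_modified_in_insn_p receives a
const_rtx.  A caller that already holds an rtx_insn * can pass it straight
through; a minimal sketch with a hypothetical callback:

    /* Hypothetical note_stores callback: set *(bool *) DATA if DEST
       is a memory reference.  */
    static void
    note_mem_store (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
    {
      if (MEM_P (dest))
        *(bool *) data = true;
    }

    static bool
    writes_mem_p (rtx_insn *insn)
    {
      bool found = false;
      note_stores (insn, note_mem_store, &found);
      return found;
    }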
&& find_reg_note (insn, REG_NOALIAS, NULL_RTX))
record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
else
- note_stores (PATTERN (insn), record_set, NULL);
+ note_stores (insn, record_set, NULL);
set = single_set (insn);
live during the call, but the subreg that is set
isn't. */
CLEAR_HARD_REG_SET (this_insn_sets);
- note_stores (PATTERN (insn), mark_set_regs, &this_insn_sets);
+ note_stores (insn, mark_set_regs, &this_insn_sets);
/* Sibcalls are considered to set the return value. */
if (SIBLING_CALL_P (insn) && crtl->return_rtx)
mark_set_regs (crtl->return_rtx, NULL_RTX, &this_insn_sets);
live during the call, but the subreg that is set
isn't. */
CLEAR_HARD_REG_SET (this_insn_sets);
- note_stores (PATTERN (insn), mark_set_regs, &this_insn_sets);
+ note_stores (insn, mark_set_regs, &this_insn_sets);
/* Sibcalls are considered to set the return value,
compare df-scan.c:df_get_call_refs. */
if (SIBLING_CALL_P (insn) && crtl->return_rtx)
be live across the call, while the other is set
afterwards. */
CLEAR_HARD_REG_SET (this_insn_sets);
- note_stores (PATTERN (insn), mark_set_regs, &this_insn_sets);
+ note_stores (insn, mark_set_regs, &this_insn_sets);
AND_COMPL_HARD_REG_SET (hard_regs_saved, this_insn_sets);
}
multi-hard-reg pseudo; then the pseudo is considered live
during the call, but the subreg that is set isn't. */
CLEAR_HARD_REG_SET (this_insn_sets);
- note_stores (PATTERN (insn), mark_set_regs, &this_insn_sets);
+ note_stores (insn, mark_set_regs, &this_insn_sets);
/* Compute which hard regs must be saved before this call. */
AND_COMPL_HARD_REG_SET (hard_regs_to_save, call_fixed_reg_set);
/* Registers that are set in CHAIN->INSN live in the new insn.
(Unless there is a REG_UNUSED note for them, but we don't
look for them here.) */
- note_stores (PATTERN (chain->insn), add_stored_regs,
- &new_chain->live_throughout);
+ note_stores (chain->insn, add_stored_regs, &new_chain->live_throughout);
CLEAR_REG_SET (&new_chain->dead_or_set);
if (chain->insn == BB_END (BASIC_BLOCK_FOR_FN (cfun, chain->block)))
BB_END (BASIC_BLOCK_FOR_FN (cfun, chain->block)) = new_chain->insn;
subst_low_luid = DF_INSN_LUID (insn);
subst_insn = insn;
- note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
- insn);
+ note_stores (insn, set_nonzero_bits_and_sign_copies, insn);
record_dead_and_set_regs (insn);
if (AUTO_INC_DEC)
info.mask = mask;
for (p = PREV_INSN (use); info.mask && p != insn; p = PREV_INSN (p))
if (INSN_P (p))
- note_stores (PATTERN (p), likely_spilled_retval_1, &info);
+ note_stores (p, likely_spilled_retval_1, &info);
mask = info.mask;
/* Check if any of the (probably) live return value registers is
been made to this insn. The order is important, because newi2pat
can affect nonzero_bits of newpat. */
if (newi2pat)
- note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
- note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
+ note_pattern_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
+ note_pattern_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
}
if (undobuf.other_insn != NULL_RTX)
the return value register is set at this LUID. We could
still replace a register with the return value from the
wrong subroutine call! */
- note_stores (PATTERN (insn), record_dead_and_set_regs_1, NULL_RTX);
+ note_stores (insn, record_dead_and_set_regs_1, NULL_RTX);
}
else
- note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
+ note_stores (insn, record_dead_and_set_regs_1, insn);
}
/* If a SUBREG has the promoted bit set, it is in fact a property of the
if (find_regno_note (insn, REG_UNUSED, reg_dead_regno))
return 1;
- note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
+ note_stores (insn, reg_dead_at_p_1, NULL);
if (reg_dead_flag)
return reg_dead_flag == 1 ? 1 : 0;
we found earlier. */
if (recog_memoized (insn) != CODE_FOR_compare_eq)
{
- note_stores (PATTERN (insn), note_np_check_stores, NULL);
+ note_stores (insn, note_np_check_stores, NULL);
if (np_check_regno != -1)
{
if (find_regno_note (insn, REG_INC, np_check_regno))
only those jumps which are still in flight. */
static void
-maybe_clobber_cond (rtx insn, int clock_var)
+maybe_clobber_cond (rtx_insn *insn, int clock_var)
{
int n, idx;
idx = ss.jump_cycle_index;
continue;
}
- note_stores (PATTERN (insn), clobber_cond_1, ss.jump_cond + idx);
+ note_stores (insn, clobber_cond_1, ss.jump_cond + idx);
for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
if (REG_NOTE_KIND (link) == REG_INC)
clobber_cond_1 (XEXP (link, 0), NULL_RTX, ss.jump_cond + idx);
flags |= frv_cond_flags (XEXP (x, 0));
x = XEXP (x, 1);
}
- note_stores (x, frv_registers_update_1, &flags);
+ note_pattern_stores (x, frv_registers_update_1, &flags);
}
/* Invalidate NEXT_IO's address if it depends on something that
is clobbered by INSN. */
if (next_io->var_address)
- note_stores (PATTERN (insn), frv_io_check_address,
- &next_io->var_address);
+ note_stores (insn, frv_io_check_address, &next_io->var_address);
/* If the next membar is associated with a __builtin_read,
see if INSN reads from that address. If it does, and if
if (volatile_refs_p (PATTERN (insn)))
CLEAR_HARD_REG_SET (used_regs);
else
- note_stores (PATTERN (insn), frv_io_handle_set, &used_regs);
+ note_stores (insn, frv_io_handle_set, &used_regs);
note_uses (&PATTERN (insn), frv_io_handle_use, &used_regs);
break;
HARD_REG_SET defs, uses;
CLEAR_HARD_REG_SET (defs);
CLEAR_HARD_REG_SET (uses);
- note_stores (PATTERN (insn), record_hard_reg_sets, &defs);
+ note_stores (insn, record_hard_reg_sets, &defs);
note_uses (&PATTERN (insn), record_hard_reg_uses, &uses);
bool exec_lo_def_p = TEST_HARD_REG_BIT (defs, EXEC_LO_REG);
HARD_REG_SET ireads, iwrites;
CLEAR_HARD_REG_SET (ireads);
CLEAR_HARD_REG_SET (iwrites);
- note_stores (PATTERN (insn), record_hard_reg_sets, &iwrites);
+ note_stores (insn, record_hard_reg_sets, &iwrites);
note_uses (&PATTERN (insn), record_hard_reg_uses, &ireads);
/* Scan recent previous instructions for dependencies not handled in
if (CALL_P (insn))
{
bool avx_upper_reg_found = false;
- note_stores (pat, ix86_check_avx_upper_stores, &avx_upper_reg_found);
+ note_stores (insn, ix86_check_avx_upper_stores, &avx_upper_reg_found);
return avx_upper_reg_found ? AVX_U128_DIRTY : AVX_U128_CLEAN;
}
static bool
vr4130_true_reg_dependence_p (rtx insn)
{
- note_stores (PATTERN (vr4130_last_insn),
- vr4130_true_reg_dependence_p_1, &insn);
+ note_stores (vr4130_last_insn, vr4130_true_reg_dependence_p_1, &insn);
return insn == 0;
}
if (mips_r10k_cache_barrier == R10K_CACHE_BARRIER_STORE)
{
- note_stores (PATTERN (insn), r10k_needs_protection_p_store, &insn);
+ note_stores (insn, r10k_needs_protection_p_store, &insn);
return insn == NULL_RTX;
}
state->insns_left);
mips_sim_insn = insn;
- note_stores (PATTERN (insn), mips_sim_record_set, state);
+ note_stores (insn, mips_sim_record_set, state);
}
/* Simulate issuing a NOP in state STATE. */
&uses);
HARD_REG_SET delay_sets;
CLEAR_HARD_REG_SET (delay_sets);
- note_stores (PATTERN (SEQ_END (insn)), record_hard_reg_sets,
+ note_stores (SEQ_END (insn), record_hard_reg_sets,
&delay_sets);
rtx_insn *prev = prev_active_insn (insn);
{
HARD_REG_SET sets;
CLEAR_HARD_REG_SET (sets);
- note_stores (PATTERN (prev), record_hard_reg_sets,
- &sets);
+ note_stores (prev, record_hard_reg_sets, &sets);
/* Re-order if safe. */
if (!hard_reg_set_intersect_p (delay_sets, uses)
continue;
}
- note_stores (pat,
+ note_stores (cur_insn,
s390_reg_clobbered_rtx,
regs_ever_clobbered);
}
HOST_WIDE_INT, tree);
static void sh_file_start (void);
static bool sh_assemble_integer (rtx, unsigned int, int);
-static bool flow_dependent_p (rtx, rtx);
+static bool flow_dependent_p (rtx_insn *, rtx_insn *);
static void flow_dependent_p_1 (rtx, const_rtx, void *);
static int shiftcosts (rtx);
static int and_xor_ior_costs (rtx, int);
/* Check if INSN is flow-dependent on DEP_INSN. Can also be used to check
if DEP_INSN is anti-flow dependent on INSN. */
static bool
-flow_dependent_p (rtx insn, rtx dep_insn)
+flow_dependent_p (rtx_insn *insn, rtx_insn *dep_insn)
{
rtx tmp = PATTERN (insn);
- note_stores (PATTERN (dep_insn), flow_dependent_p_1, &tmp);
+ note_stores (dep_insn, flow_dependent_p_1, &tmp);
return tmp == NULL_RTX;
}
else if (INSN_P (insn))
{
count_reg_usage (insn, counts, NULL_RTX, 1);
- note_stores (PATTERN (insn), count_stores, counts + nreg * 2);
+ note_stores (insn, count_stores, counts + nreg * 2);
}
/* If there can be debug insns, COUNTS are 3 consecutive arrays.
First one counts how many times each pseudo is used outside
/* Invalidate all locations written by this insn. Note that the elts we
looked up in the previous loop aren't affected, just some of their
locations may go away. */
- note_stores (body, cselib_invalidate_rtx_note_stores, NULL);
+ note_pattern_stores (body, cselib_invalidate_rtx_note_stores, NULL);
for (i = n_sets_before_autoinc; i < n_sets; i++)
cselib_invalidate_rtx (sets[i].dest);
}
-/* Mark INSN if BODY stores to a non-register destination. */
+/* Mark INSN if it stores to a non-register destination. */
static void
-mark_nonreg_stores (rtx body, rtx_insn *insn, bool fast)
+mark_nonreg_stores (rtx_insn *insn, bool fast)
{
if (fast)
- note_stores (body, mark_nonreg_stores_1, insn);
+ note_stores (insn, mark_nonreg_stores_1, insn);
else
- note_stores (body, mark_nonreg_stores_2, insn);
+ note_stores (insn, mark_nonreg_stores_2, insn);
}
if (arg_stores && bitmap_bit_p (arg_stores, INSN_UID (insn)))
continue;
if (deletable_insn_p (insn, fast, arg_stores))
- mark_nonreg_stores (PATTERN (insn), insn, fast);
+ mark_nonreg_stores (insn, fast);
else
mark_insn (insn, fast);
}
mem_write_insn_p (rtx_insn *insn)
{
mem_ref_p = false;
- note_stores (PATTERN (insn), mark_mem_store, NULL);
+ note_stores (insn, mark_mem_store, NULL);
return mem_ref_p;
}
if (volatile_insn_p (PATTERN (insn)))
return false;
memrefs_in_across |= find_memory (insn);
- note_stores (PATTERN (insn), find_memory_stores,
- &mem_sets_in_across);
+ note_stores (insn, find_memory_stores, &mem_sets_in_across);
/* This is used just to find sets of the stack pointer. */
memrefs_in_across |= mem_sets_in_across;
trapping_insns_in_across |= may_trap_p (PATTERN (insn));
{
int mem_ref_flags = 0;
int mem_set_flags = 0;
- note_stores (PATTERN (insn), find_memory_stores, &mem_set_flags);
+ note_stores (insn, find_memory_stores, &mem_set_flags);
mem_ref_flags = find_memory (insn);
/* Catch sets of the stack pointer. */
mem_ref_flags |= mem_set_flags;
for (cur = new_insn; cur; cur = NEXT_INSN (cur))
{
info.current = cur;
- note_stores (PATTERN (cur), note_add_store, &info);
+ note_stores (cur, note_add_store, &info);
}
/* If a failure was flagged above, return 1 so that for_each_inc_dec will
bitmap regs_set = BITMAP_ALLOC (&reg_obstack);
for (this_insn = insns; this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
- note_stores (PATTERN (this_insn), look_for_hardregs, regs_set);
+ note_stores (this_insn, look_for_hardregs, regs_set);
bitmap_and_into (regs_set, regs_live);
if (!bitmap_empty_p (regs_set))
for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
{
if (INSN_P (insn))
- note_stores (PATTERN (insn), record_hard_reg_sets,
- &hardregs);
+ note_stores (insn, record_hard_reg_sets, &hardregs);
if (!hard_reg_set_empty_p (hardregs))
moved = false;
}
struct gcse_note_stores_info data;
data.insn = insn;
data.canon_mem_list = canon_modify_mem_list;
- note_stores (PATTERN (insn), canon_list_insert, (void*) &data);
+ note_stores (insn, canon_list_insert, (void*) &data);
}
}
note_stores to examine each hunk of memory that is modified. */
mci.mem = x;
mci.conflict = false;
- note_stores (PATTERN (setter), mems_conflict_for_gcse_p, &mci);
+ note_stores (setter, mems_conflict_for_gcse_p, &mci);
if (mci.conflict)
return 1;
}
record_last_mem_set_info (insn);
}
- note_stores (PATTERN (insn), record_last_set_info, insn);
+ note_stores (insn, record_last_set_info, insn);
}
/* The next pass builds the hash table. */
s.insn = insn;
s.nsets = 0;
- note_stores (pattern, record_set_data, &s);
+ note_pattern_stores (pattern, record_set_data, &s);
/* Considered invariant insns have exactly one set. */
gcc_assert (s.nsets == 1);
return valid_none;
}
- note_stores (PATTERN (insn), validate_equiv_mem_from_store, &info);
+ note_stores (insn, validate_equiv_mem_from_store, &info);
if (info.equiv_mem_modified)
return valid_none;
if (set == NULL_RTX
|| side_effects_p (SET_SRC (set)))
{
- note_stores (PATTERN (insn), no_equiv, NULL);
+ note_pattern_stores (PATTERN (insn), no_equiv, NULL);
continue;
}
else if (GET_CODE (PATTERN (insn)) == PARALLEL)
{
rtx part = XVECEXP (PATTERN (insn), 0, i);
if (part != set)
- note_stores (part, no_equiv, NULL);
+ note_pattern_stores (part, no_equiv, NULL);
}
}
{
/* This might be setting a SUBREG of a pseudo, a pseudo that is
also set somewhere else to a constant. */
- note_stores (set, no_equiv, NULL);
+ note_pattern_stores (set, no_equiv, NULL);
continue;
}
equivalent to a mem. */
if (MEM_P (src) && reg_equiv[regno].pdx_subregs)
{
- note_stores (set, no_equiv, NULL);
+ note_pattern_stores (set, no_equiv, NULL);
continue;
}
bitmap modified = BITMAP_ALLOC (NULL);
for (rtx_insn *i = doloop_seq; i != NULL; i = NEXT_INSN (i))
- note_stores (PATTERN (i), record_reg_sets, modified);
+ note_stores (i, record_reg_sets, modified);
basic_block loop_end = desc->out_edge->src;
bool fail = bitmap_intersect_p (df_get_live_out (loop_end), modified);
mark_ref_regs (PATTERN (insn));
n_regs_set = 0;
- note_stores (PATTERN (insn), mark_reg_clobber, NULL);
+ note_stores (insn, mark_reg_clobber, NULL);
/* Mark any registers dead after INSN as dead now. */
Clobbers are processed again, so they conflict with
the registers that are set. */
- note_stores (PATTERN (insn), mark_reg_store, NULL);
+ note_stores (insn, mark_reg_store, NULL);
if (AUTO_INC_DEC)
for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
continue;
CLEAR_REG_SET (this_altered);
- note_stores (PATTERN (insn), mark_altered, this_altered);
+ note_stores (insn, mark_altered, this_altered);
if (CALL_P (insn))
{
/* Kill all call clobbered registers. */
if (REG_NOTE_KIND (link) == REG_DEAD)
reg_dies (XEXP (link, 0), &live_now);
- note_stores (PATTERN (insn), reg_becomes_live, &live_now);
+ note_stores (insn, reg_becomes_live, &live_now);
for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
if (REG_NOTE_KIND (link) == REG_UNUSED)
reg_dies (XEXP (link, 0), &live_now);
data.first = insns;
data.insn = insn;
data.must_stay = 0;
- note_stores (PATTERN (insn), no_conflict_move_test, &data);
+ note_stores (insn, no_conflict_move_test, &data);
if (! data.must_stay)
{
if (PREV_INSN (insn))
/* Otherwise, work out if the compare-and-swap succeeded. */
cc_reg = NULL_RTX;
if (have_insn_for (COMPARE, CCmode))
- note_stores (PATTERN (get_last_insn ()), find_cc_set, &cc_reg);
+ note_stores (get_last_insn (), find_cc_set, &cc_reg);
if (cc_reg)
{
target_bool = emit_store_flag_force (target_bool, EQ, cc_reg,
It will set mems_conflict_p to nonzero if there may be a
conflict between X and SETTER. */
mems_conflict_p = 0;
- note_stores (PATTERN (setter), find_mem_conflicts, x);
+ note_stores (setter, find_mem_conflicts, x);
if (mems_conflict_p)
return 1;
rtx note;
/* Find all stores and record them. */
- note_stores (PATTERN (insn), record_last_set_info, insn);
+ note_stores (insn, record_last_set_info, insn);
/* Also record autoincremented REGs for this insn as changed. */
for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
if (CALL_P (insn))
{
unsigned int regno;
- rtx link, x;
hard_reg_set_iterator hrsi;
EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, regno, hrsi)
record_last_reg_set_info_regno (insn, regno);
- for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
- {
- gcc_assert (GET_CODE (XEXP (link, 0)) != CLOBBER_HIGH);
- if (GET_CODE (XEXP (link, 0)) == CLOBBER)
- {
- x = XEXP (XEXP (link, 0), 0);
- if (REG_P (x))
- {
- gcc_assert (HARD_REGISTER_P (x));
- record_last_reg_set_info (insn, x);
- }
- }
- }
-
if (! RTL_CONST_OR_PURE_CALL_P (insn))
record_last_mem_set_info (insn);
}
|| reload_combine_recognize_pattern (insn))
continue;
- note_stores (PATTERN (insn), reload_combine_note_store, NULL);
+ note_stores (insn, reload_combine_note_store, NULL);
if (CALL_P (insn))
{
{
rtx setuse = XEXP (link, 0);
rtx usage_rtx = XEXP (setuse, 0);
- /* We could support CLOBBER_HIGH and treat it in the same way as
- HARD_REGNO_CALL_PART_CLOBBERED, but no port needs that yet. */
- gcc_assert (GET_CODE (setuse) != CLOBBER_HIGH);
- if ((GET_CODE (setuse) == USE || GET_CODE (setuse) == CLOBBER)
- && REG_P (usage_rtx))
+ if (GET_CODE (setuse) == USE && REG_P (usage_rtx))
{
unsigned int end_regno = END_REGNO (usage_rtx);
for (unsigned int i = REGNO (usage_rtx); i < end_regno; ++i)
- if (GET_CODE (XEXP (link, 0)) == CLOBBER)
- {
- reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
- reg_state[i].store_ruid = reload_combine_ruid;
- }
- else
- reg_state[i].use_index = -1;
+ reg_state[i].use_index = -1;
}
}
}
}
}
}
- note_stores (PATTERN (insn), move2add_note_store, insn);
+ note_stores (insn, move2add_note_store, insn);
/* If INSN is a conditional branch, we try to extract an
implicit set out of it. */
unknown values. */
if (CALL_P (insn))
{
- rtx link;
-
for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
{
if (call_used_regs[i])
/* Reset the information about this register. */
reg_mode[i] = VOIDmode;
}
-
- for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
- link = XEXP (link, 1))
- {
- rtx setuse = XEXP (link, 0);
- rtx usage_rtx = XEXP (setuse, 0);
- /* CALL_INSN_FUNCTION_USAGEs can only have full clobbers, not
- clobber_highs. */
- gcc_assert (GET_CODE (setuse) != CLOBBER_HIGH);
- if (GET_CODE (setuse) == CLOBBER
- && REG_P (usage_rtx))
- {
- unsigned int end_regno = END_REGNO (usage_rtx);
- for (unsigned int r = REGNO (usage_rtx); r < end_regno; ++r)
- /* Reset the information about this register. */
- reg_mode[r] = VOIDmode;
- }
- }
}
}
return changed;
static void
kill_clobbered_values (rtx_insn *insn, struct value_data *vd)
{
- note_stores (PATTERN (insn), kill_clobbered_value, vd);
-
- if (CALL_P (insn))
- {
- rtx exp;
-
- for (exp = CALL_INSN_FUNCTION_USAGE (insn); exp; exp = XEXP (exp, 1))
- {
- rtx x = XEXP (exp, 0);
- if (GET_CODE (x) == CLOBBER)
- kill_value (SET_DEST (x), vd);
- }
- }
+ note_stores (insn, kill_clobbered_value, vd);
}
/* Perform the forward copy propagation on basic block BB. */
if (!noop_p)
{
/* Notice stores. */
- note_stores (PATTERN (insn), kill_set_value, &ksvd);
+ note_stores (insn, kill_set_value, &ksvd);
/* Notice copies. */
if (copy_p)
outside an operand, as live. */
hide_operands (n_ops, old_operands, old_dups, untracked_operands,
false);
- note_stores (PATTERN (insn), note_sets_clobbers, &clobber_code);
+ note_stores (insn, note_sets_clobbers, &clobber_code);
restore_operands (insn, n_ops, old_operands, old_dups);
/* Step 1b: Begin new chains for earlyclobbered writes inside
outside an operand, as live. */
hide_operands (n_ops, old_operands, old_dups, untracked_operands,
false);
- note_stores (PATTERN (insn), note_sets_clobbers, &set_code);
+ note_stores (insn, note_sets_clobbers, &set_code);
restore_operands (insn, n_ops, old_operands, old_dups);
/* Step 6b: Begin new chains for writes inside operands. */
cannot be done. */
for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
if (INSN_P (insn))
- note_stores (PATTERN (insn), mark_not_eliminable, NULL);
+ note_pattern_stores (PATTERN (insn), mark_not_eliminable, NULL);
maybe_fix_stack_asms ();
{
regset_head regs_to_forget;
INIT_REG_SET (&regs_to_forget);
- note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
+ note_stores (insn, forget_old_reloads_1, &regs_to_forget);
/* If this is a USE and CLOBBER of a MEM, ensure that any
references to eliminable registers have been removed. */
between INSN and NEXT and use them to forget old reloads. */
for (rtx_insn *x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
- note_stores (PATTERN (x), forget_old_reloads_1, NULL);
+ note_stores (x, forget_old_reloads_1, NULL);
#if AUTO_INC_DEC
/* Likewise for regs altered by auto-increment in this insn.
clear any memory of reloaded copies of the pseudo reg.
If this output reload comes from a spill reg,
reg_has_output_reload will make this do nothing. */
- note_stores (pat, forget_old_reloads_1, NULL);
+ note_stores (p, forget_old_reloads_1, NULL);
if (reg_mentioned_p (rl_reg_rtx, pat))
{
GET_MODE (XEXP (link, 0)),
REGNO (XEXP (link, 0)));
- note_stores (PATTERN (real_insn), update_live_status, NULL);
+ note_stores (real_insn, update_live_status, NULL);
/* If any registers were unused after this insn, kill them.
These notes will always be accurate. */
#define GET_REG_NOTE_NAME(MODE) (reg_note_name[(int) (MODE)])
/* This field is only present on CALL_INSNs. It holds a chain of EXPR_LIST of
- USE and CLOBBER expressions.
+ USE, CLOBBER and SET expressions.
USE expressions list the registers filled with arguments that
are passed to the function.
CLOBBER expressions document the registers explicitly clobbered
by this CALL_INSN.
+ SET expressions say that the return value of the call (the SET_DEST)
+ is equivalent to a value available before the call (the SET_SRC).
+ This kind of SET is used when the return value is predictable in
+ advance. It is purely an optimisation hint; unlike USEs and CLOBBERs,
+ it does not affect register liveness.
+
Pseudo registers cannot be mentioned in this list. */
#define CALL_INSN_FUNCTION_USAGE(INSN) XEXP(INSN, 7)
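As an illustration of the documented structure (a sketch, not part of the
patch; call_insn is assumed to satisfy CALL_P), the three kinds of entry
can be distinguished like this:

    int n_uses = 0, n_clobbers = 0, n_sets = 0;
    for (rtx link = CALL_INSN_FUNCTION_USAGE (call_insn);
         link; link = XEXP (link, 1))
      {
        rtx entry = XEXP (link, 0);
        if (GET_CODE (entry) == USE)
          n_uses++;        /* argument register passed to the callee */
        else if (GET_CODE (entry) == CLOBBER)
          n_clobbers++;    /* register explicitly clobbered by the call */
        else if (GET_CODE (entry) == SET)
          n_sets++;        /* return value equivalent to SET_SRC (entry) */
      }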
extern void record_hard_reg_uses (rtx *, void *);
extern void find_all_hard_regs (const_rtx, HARD_REG_SET *);
extern void find_all_hard_reg_sets (const rtx_insn *, HARD_REG_SET *, bool);
-extern void note_stores (const_rtx, void (*) (rtx, const_rtx, void *), void *);
+extern void note_pattern_stores (const_rtx,
+ void (*) (rtx, const_rtx, void *), void *);
+extern void note_stores (const rtx_insn *,
+ void (*) (rtx, const_rtx, void *), void *);
extern void note_uses (rtx *, void (*) (rtx *, void *), void *);
extern int dead_or_set_p (const rtx_insn *, const_rtx);
extern int dead_or_set_regno_p (const rtx_insn *, unsigned int);
struct set_of_data data;
data.found = NULL_RTX;
data.pat = pat;
- note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
+ note_pattern_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
return data.found;
}
rtx link;
CLEAR_HARD_REG_SET (*pset);
- note_stores (PATTERN (insn), record_hard_reg_sets, pset);
- if (CALL_P (insn))
- {
- if (implicit)
- IOR_HARD_REG_SET (*pset, call_used_reg_set);
-
- for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
- record_hard_reg_sets (XEXP (link, 0), NULL, pset);
- }
+ note_stores (insn, record_hard_reg_sets, pset);
+ if (CALL_P (insn) && implicit)
+ IOR_HARD_REG_SET (*pset, call_used_reg_set);
for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
if (REG_NOTE_KIND (link) == REG_INC)
record_hard_reg_sets (XEXP (link, 0), NULL, pset);
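With note_stores doing the work, the explicit walk over
CALL_INSN_FUNCTION_USAGE above becomes redundant.  Callers are unchanged;
a sketch of a typical query, with a hypothetical REG rtx:

    HARD_REG_SET stored;
    find_all_hard_reg_sets (insn, &stored, /*implicit=*/false);
    /* True if INSN sets (part of) REG, including call-usage clobbers.  */
    bool reg_is_set = TEST_HARD_REG_BIT (stored, REGNO (reg));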
the SUBREG will be passed. */
void
-note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
+note_pattern_stores (const_rtx x,
+ void (*fun) (rtx, const_rtx, void *), void *data)
{
int i;
else if (GET_CODE (x) == PARALLEL)
for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
- note_stores (XVECEXP (x, 0, i), fun, data);
+ note_pattern_stores (XVECEXP (x, 0, i), fun, data);
+}
+
+/* Same, but for an instruction. If the instruction is a call, include
+ any CLOBBERs in its CALL_INSN_FUNCTION_USAGE. */
+
+void
+note_stores (const rtx_insn *insn,
+ void (*fun) (rtx, const_rtx, void *), void *data)
+{
+ if (CALL_P (insn))
+ for (rtx link = CALL_INSN_FUNCTION_USAGE (insn);
+ link; link = XEXP (link, 1))
+ if (GET_CODE (XEXP (link, 0)) == CLOBBER)
+ note_pattern_stores (XEXP (link, 0), fun, data);
+ note_pattern_stores (PATTERN (insn), fun, data);
}
\f
/* Like note_stores, but call FUN for each expression that is being
if (INSN_P (before))
{
int nregs_old = parm.nregs;
- note_stores (PATTERN (before), parms_set, &parm);
+ note_stores (before, parms_set, &parm);
/* If we found something that did not set a parameter reg,
we're done. Do not keep going, as that might result
in hoisting an insn before the setting of a pseudo
reg_pressure_info[cl].change = 0;
}
- note_stores (PATTERN (insn), mark_insn_reg_clobber, insn);
+ note_stores (insn, mark_insn_reg_clobber, insn);

- note_stores (PATTERN (insn), mark_insn_reg_store, insn);
+ note_stores (insn, mark_insn_reg_store, insn);
if (AUTO_INC_DEC)
for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
sets_likely_spilled (rtx pat)
{
bool ret = false;
- note_stores (pat, sets_likely_spilled_1, &ret);
+ note_pattern_stores (pat, sets_likely_spilled_1, &ret);
return ret;
}
note_uses (&PATTERN (insn), record_hard_reg_uses, &this_used);
AND_COMPL_HARD_REG_SET (this_used, prologue_clobbered);
IOR_HARD_REG_SET (prologue_used, this_used);
- note_stores (PATTERN (insn), record_hard_reg_sets, &prologue_clobbered);
+ note_stores (insn, record_hard_reg_sets, &prologue_clobbered);
}
CLEAR_HARD_REG_BIT (prologue_clobbered, STACK_POINTER_REGNUM);
if (frame_pointer_needed)
if (INSN_P (insn))
{
/* Check if insn modifies the stack pointer. */
- note_stores (PATTERN (insn),
- notice_stack_pointer_modification_1,
- NULL);
+ note_stores (insn, notice_stack_pointer_modification_1, NULL);
if (! crtl->sp_is_unchanging)
return 0;
}
amd.stack_adjust = -VTI (bb)->out.stack_adjust;
amd.store = true;
- note_stores (PATTERN (insn), adjust_mem_stores, &amd);
+ note_stores (insn, adjust_mem_stores, &amd);
amd.store = false;
if (GET_CODE (PATTERN (insn)) == PARALLEL
insert notes before it without worrying about any
notes that MO_USEs might emit after the insn. */
cui.store_p = true;
- note_stores (PATTERN (insn), add_stores, &cui);
+ note_stores (insn, add_stores, &cui);
n2 = VTI (bb)->mos.length () - 1;
mos = VTI (bb)->mos.address ();