+2018-08-06 Alan Hayward <alan.hayward@arm.com>
+
+ * alias.c (record_set): Check for clobber high.
+ * cfgexpand.c (expand_gimple_stmt): Likewise.
+ * combine-stack-adj.c (single_set_for_csa): Likewise.
+ * combine.c (find_single_use_1): Likewise.
+ (set_nonzero_bits_and_sign_copies): Likewise.
+ (get_combine_src_dest): Likewise.
+ (is_parallel_of_n_reg_sets): Likewise.
+ (try_combine): Likewise.
+ (record_dead_and_set_regs_1): Likewise.
+ (reg_dead_at_p_1): Likewise.
+ (reg_dead_at_p): Likewise.
+ * dce.c (deletable_insn_p): Likewise.
+ (mark_nonreg_stores_1): Likewise.
+ (mark_nonreg_stores_2): Likewise.
+ * df-scan.c (df_find_hard_reg_defs): Likewise.
+ (df_uses_record): Likewise.
+ (df_get_call_refs): Likewise.
+ * dwarf2out.c (mem_loc_descriptor): Likewise.
+ * haifa-sched.c (haifa_classify_rtx): Likewise.
+ * ira-build.c (create_insn_allocnos): Likewise.
+ * ira-costs.c (scan_one_insn): Likewise.
+ * ira.c (equiv_init_movable_p): Likewise.
+ (rtx_moveable_p): Likewise.
+ (interesting_dest_for_shprep): Likewise.
+ * jump.c (mark_jump_label_1): Likewise.
+ * postreload-gcse.c (record_opr_changes): Likewise.
+ * postreload.c (reload_cse_simplify): Likewise.
+ (struct reg_use): Add source expr.
+ (reload_combine): Check for clobber high.
+ (reload_combine_note_use): Likewise.
+ (reload_cse_move2add): Likewise.
+ (move2add_note_store): Likewise.
+ * print-rtl.c (print_pattern): Likewise.
+ * recog.c (decode_asm_operands): Likewise.
+ (store_data_bypass_p): Likewise.
+ (if_test_bypass_p): Likewise.
+ * regcprop.c (kill_clobbered_value): Likewise.
+ (kill_set_value): Likewise.
+ * reginfo.c (reg_scan_mark_refs): Likewise.
+ * reload1.c (maybe_fix_stack_asms): Likewise.
+ (eliminate_regs_1): Likewise.
+ (elimination_effects): Likewise.
+ (mark_not_eliminable): Likewise.
+ (scan_paradoxical_subregs): Likewise.
+ (forget_old_reloads_1): Likewise.
+ * reorg.c (find_end_label): Likewise.
+ (try_merge_delay_insns): Likewise.
+ (redundant_insn): Likewise.
+ (own_thread_p): Likewise.
+ (fill_simple_delay_slots): Likewise.
+ (fill_slots_from_thread): Likewise.
+ (dbr_schedule): Likewise.
+ * resource.c (update_live_status): Likewise.
+ (mark_referenced_resources): Likewise.
+ (mark_set_resources): Likewise.
+ * rtl.c (copy_rtx): Likewise.
+ * rtlanal.c (reg_referenced_p): Likewise.
+ (single_set_2): Likewise.
+ (noop_move_p): Likewise.
+ (note_stores): Likewise.
+ * sched-deps.c (sched_analyze_reg): Likewise.
+ (sched_analyze_insn): Likewise.
+
2018-08-06 Alan Hayward <alan.hayward@arm.com>
* cse.c (invalidate_reg): New function extracted from...
new_reg_base_value[regno] = 0;
return;
}
+ /* A CLOBBER_HIGH only wipes out the old value if the mode of the old
+ value is greater than that of the clobber. */
+ else if (GET_CODE (set) == CLOBBER_HIGH)
+ {
+ if (new_reg_base_value[regno] != 0
+ && reg_is_clobbered_by_clobber_high (
+ regno, GET_MODE (new_reg_base_value[regno]), XEXP (set, 0)))
+ new_reg_base_value[regno] = 0;
+ return;
+ }
+
src = SET_SRC (set);
}
else
/* If we want exceptions for non-call insns, any
may_trap_p instruction may throw. */
&& GET_CODE (PATTERN (insn)) != CLOBBER
+ && GET_CODE (PATTERN (insn)) != CLOBBER_HIGH
&& GET_CODE (PATTERN (insn)) != USE
&& insn_could_throw_p (insn))
make_reg_eh_region_note (insn, 0, lp_nr);
&& SET_SRC (this_rtx) == SET_DEST (this_rtx))
;
else if (GET_CODE (this_rtx) != CLOBBER
+ && GET_CODE (this_rtx) != CLOBBER_HIGH
&& GET_CODE (this_rtx) != USE)
return NULL_RTX;
}
case SYMBOL_REF:
CASE_CONST_ANY:
case CLOBBER:
+ case CLOBBER_HIGH:
return 0;
case SET:
return;
}
 + /* Should not happen as we are only using pseudo registers. */
+ gcc_assert (GET_CODE (set) != CLOBBER_HIGH);
+
/* If this register is being initialized using itself, and the
register is uninitialized in this basic block, and there are
no LOG_LINKS which set the register, then part of the
/* We can ignore CLOBBERs. */
case CLOBBER:
+ case CLOBBER_HIGH:
break;
case SET:
|| !REG_P (SET_DEST (XVECEXP (pat, 0, i))))
return false;
for ( ; i < len; i++)
- if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER
- || XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
- return false;
-
+ switch (GET_CODE (XVECEXP (pat, 0, i)))
+ {
+ case CLOBBER:
+ if (XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
+ return false;
+ break;
+ case CLOBBER_HIGH:
+ break;
+ default:
+ return false;
+ }
return true;
}
for (i = 0; ok && i < XVECLEN (p2, 0); i++)
{
if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
- || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
+ || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER
+ || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER_HIGH)
&& reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
SET_DEST (XVECEXP (p2, 0, i))))
ok = false;
? SET_SRC (setter)
: gen_lowpart (GET_MODE (dest),
SET_SRC (setter)));
+ else if (GET_CODE (setter) == CLOBBER_HIGH)
+ {
 + reg_stat_type *rsp = &reg_stat[REGNO (dest)];
+ if (rsp->last_set_value
+ && reg_is_clobbered_by_clobber_high
+ (REGNO (dest), GET_MODE (rsp->last_set_value),
+ XEXP (setter, 0)))
+ record_value_for_reg (dest, NULL, NULL_RTX);
+ }
else
record_value_for_reg (dest, record_dead_insn, NULL_RTX);
}
static unsigned int reg_dead_regno, reg_dead_endregno;
static int reg_dead_flag;
+rtx reg_dead_reg;
/* Function called via note_stores from reg_dead_at_p.
if (!REG_P (dest))
return;
+ if (GET_CODE (x) == CLOBBER_HIGH
+ && !reg_is_clobbered_by_clobber_high (reg_dead_reg, XEXP (x, 0)))
+ return;
+
regno = REGNO (dest);
endregno = END_REGNO (dest);
if (reg_dead_endregno > regno && reg_dead_regno < endregno)
/* Set variables for reg_dead_at_p_1. */
reg_dead_regno = REGNO (reg);
reg_dead_endregno = END_REGNO (reg);
+ reg_dead_reg = reg;
reg_dead_flag = 0;
return false;
case CLOBBER:
+ case CLOBBER_HIGH:
if (fast)
{
/* A CLOBBER of a dead pseudo register serves no purpose.
mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data)
{
if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
- mark_insn ((rtx_insn *) data, true);
+ {
+ gcc_checking_assert (GET_CODE (pattern) != CLOBBER_HIGH);
+ mark_insn ((rtx_insn *) data, true);
+ }
}
mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data)
{
if (GET_CODE (pattern) != CLOBBER && !REG_P (dest))
- mark_insn ((rtx_insn *) data, false);
+ {
+ gcc_checking_assert (GET_CODE (pattern) != CLOBBER_HIGH);
+ mark_insn ((rtx_insn *) data, false);
+ }
}
break;
case CLOBBER:
+ case CLOBBER_HIGH:
df_find_hard_reg_defs_1 (XEXP (x, 0), defs);
break;
/* If we're clobbering a REG then we have a def so ignore. */
return;
+ case CLOBBER_HIGH:
+ gcc_assert (REG_P (XEXP (x, 0)));
+ return;
+
case MEM:
df_uses_record (collection_rec,
&XEXP (x, 0), DF_REF_REG_MEM_LOAD,
for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
note = XEXP (note, 1))
{
+ gcc_assert (GET_CODE (XEXP (note, 0)) != CLOBBER_HIGH);
if (GET_CODE (XEXP (note, 0)) == USE)
df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
DF_REF_REG_USE, bb, insn_info, flags);
case CONST_FIXED:
case CLRSB:
case CLOBBER:
+ case CLOBBER_HIGH:
/* If delegitimize_address couldn't do anything with the UNSPEC, we
can't express it in the debug info. This can happen e.g. with some
TLS UNSPECs. */
/* Test if it is a 'store'. */
tmp_class = may_trap_exp (XEXP (x, 0), 1);
break;
+ case CLOBBER_HIGH:
+ gcc_assert (REG_P (XEXP (x, 0)));
+ break;
case SET:
/* Test if it is a store. */
tmp_class = may_trap_exp (SET_DEST (x), 1);
create_insn_allocnos (XEXP (x, 0), NULL, true);
return;
}
+ else if (code == CLOBBER_HIGH)
+ {
+ gcc_assert (REG_P (XEXP (x, 0)) && HARD_REGISTER_P (XEXP (x, 0)));
+ return;
+ }
else if (code == MEM)
{
create_insn_allocnos (XEXP (x, 0), NULL, false);
return insn;
}
+ if (pat_code == CLOBBER_HIGH)
+ {
+ gcc_assert (REG_P (XEXP (PATTERN (insn), 0))
+ && HARD_REGISTER_P (XEXP (PATTERN (insn), 0)));
+ return insn;
+ }
+
counted_mem = false;
set = single_set (insn);
extract_insn (insn);
case CC0:
case CLOBBER:
+ case CLOBBER_HIGH:
return 0;
case PRE_INC:
&& rtx_moveable_p (&XEXP (x, 2), OP_IN));
case CLOBBER:
+ case CLOBBER_HIGH:
return rtx_moveable_p (&SET_DEST (x), OP_OUT);
case UNSPEC_VOLATILE:
for (int i = 0; i < XVECLEN (pat, 0); i++)
{
rtx sub = XVECEXP (pat, 0, i);
- if (GET_CODE (sub) == USE || GET_CODE (sub) == CLOBBER)
+ if (GET_CODE (sub) == USE
+ || GET_CODE (sub) == CLOBBER
+ || GET_CODE (sub) == CLOBBER_HIGH)
continue;
if (GET_CODE (sub) != SET
|| side_effects_p (sub))
case CC0:
case REG:
case CLOBBER:
+ case CLOBBER_HIGH:
case CALL:
return;
record_last_reg_set_info_regno (insn, regno);
for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
- if (GET_CODE (XEXP (link, 0)) == CLOBBER)
- {
- x = XEXP (XEXP (link, 0), 0);
- if (REG_P (x))
- {
- gcc_assert (HARD_REGISTER_P (x));
- record_last_reg_set_info (insn, x);
- }
- }
+ {
+ gcc_assert (GET_CODE (XEXP (link, 0)) != CLOBBER_HIGH);
+ if (GET_CODE (XEXP (link, 0)) == CLOBBER)
+ {
+ x = XEXP (XEXP (link, 0), 0);
+ if (REG_P (x))
+ {
+ gcc_assert (HARD_REGISTER_P (x));
+ record_last_reg_set_info (insn, x);
+ }
+ }
+ }
if (! RTL_CONST_OR_PURE_CALL_P (insn))
record_last_mem_set_info (insn);
for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
{
rtx part = XVECEXP (body, 0, i);
+ /* asms can only have full clobbers, not clobber_highs. */
+ gcc_assert (GET_CODE (part) != CLOBBER_HIGH);
if (GET_CODE (part) == CLOBBER && REG_P (XEXP (part, 0)))
cselib_invalidate_rtx (XEXP (part, 0));
}
}
}
else if (GET_CODE (part) != CLOBBER
+ && GET_CODE (part) != CLOBBER_HIGH
&& GET_CODE (part) != USE)
break;
}
STORE_RUID is always meaningful if we only want to use a value in a
register in a different place: it denotes the next insn in the insn
stream (i.e. the last encountered) that sets or clobbers the register.
- REAL_STORE_RUID is similar, but clobbers are ignored when updating it. */
+ REAL_STORE_RUID is similar, but clobbers are ignored when updating it.
+ EXPR is the expression used when storing the register. */
static struct
{
struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
int real_store_ruid;
int use_ruid;
bool all_offsets_match;
+ rtx expr;
} reg_state[FIRST_PSEUDO_REGISTER];
/* Reverse linear uid. This is increased in reload_combine while scanning
{
rtx setuse = XEXP (link, 0);
rtx usage_rtx = XEXP (setuse, 0);
+ /* We could support CLOBBER_HIGH and treat it in the same way as
+ HARD_REGNO_CALL_PART_CLOBBERED, but no port needs that yet. */
+ gcc_assert (GET_CODE (setuse) != CLOBBER_HIGH);
+
if ((GET_CODE (setuse) == USE || GET_CODE (setuse) == CLOBBER)
&& REG_P (usage_rtx))
{
}
break;
+ case CLOBBER_HIGH:
+ gcc_assert (REG_P (SET_DEST (x)));
+ return;
+
case PLUS:
/* We are interested in (plus (reg) (const_int)) . */
if (!REG_P (XEXP (x, 0))
{
rtx setuse = XEXP (link, 0);
rtx usage_rtx = XEXP (setuse, 0);
+ /* CALL_INSN_FUNCTION_USAGEs can only have full clobbers, not
+ clobber_highs. */
+ gcc_assert (GET_CODE (setuse) != CLOBBER_HIGH);
if (GET_CODE (setuse) == CLOBBER
&& REG_P (usage_rtx))
{
move2add_record_mode (dst);
}
+ else if (GET_CODE (set) == CLOBBER_HIGH)
+ {
+ /* Only invalidate if actually clobbered. */
+ if (reg_mode[regno] == BLKmode
+ || reg_is_clobbered_by_clobber_high (regno, reg_mode[regno], dst))
+ goto invalidate;
+ }
else
{
invalidate:
print_exp (pp, x, verbose);
break;
case CLOBBER:
+ case CLOBBER_HIGH:
case USE:
pp_printf (pp, "%s ", GET_RTX_NAME (GET_CODE (x)));
print_value (pp, XEXP (x, 0), verbose);
{
if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
break; /* Past last SET */
+ gcc_assert (GET_CODE (XVECEXP (body, 0, i)) == SET);
if (operands)
operands[i] = SET_DEST (XVECEXP (body, 0, i));
if (operand_locs)
{
rtx out_exp = XVECEXP (out_pat, 0, i);
- if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE)
+ if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE
+ || GET_CODE (out_exp) == CLOBBER_HIGH)
continue;
gcc_assert (GET_CODE (out_exp) == SET);
{
rtx in_exp = XVECEXP (in_pat, 0, i);
- if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE)
+ if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE
+ || GET_CODE (in_exp) == CLOBBER_HIGH)
continue;
gcc_assert (GET_CODE (in_exp) == SET);
{
rtx exp = XVECEXP (out_pat, 0, i);
- if (GET_CODE (exp) == CLOBBER)
+ if (GET_CODE (exp) == CLOBBER || GET_CODE (exp) == CLOBBER_HIGH)
continue;
gcc_assert (GET_CODE (exp) == SET);
kill_clobbered_value (rtx x, const_rtx set, void *data)
{
struct value_data *const vd = (struct value_data *) data;
- if (GET_CODE (set) == CLOBBER)
+ gcc_assert (GET_CODE (set) != CLOBBER_HIGH || REG_P (x));
+
+ if (GET_CODE (set) == CLOBBER
+ || (GET_CODE (set) == CLOBBER_HIGH
+ && reg_is_clobbered_by_clobber_high (x, XEXP (set, 0))))
kill_value (x, vd);
}
struct kill_set_value_data *ksvd = (struct kill_set_value_data *) data;
if (rtx_equal_p (x, ksvd->ignore_set_reg))
return;
- if (GET_CODE (set) != CLOBBER)
+
+ gcc_assert (GET_CODE (set) != CLOBBER_HIGH || REG_P (x));
+ if (GET_CODE (set) != CLOBBER && GET_CODE (set) != CLOBBER_HIGH)
{
kill_value (x, ksvd->vd);
if (REG_P (x))
reg_scan_mark_refs (XEXP (XEXP (x, 0), 0), insn);
break;
+ case CLOBBER_HIGH:
+ gcc_assert (!(MEM_P (XEXP (x, 0))));
+ break;
+
case SET:
/* Count a set of the destination if it is a register. */
for (dest = SET_DEST (x);
rtx t = XVECEXP (pat, 0, i);
if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
+ /* CLOBBER_HIGH is only supported for LRA. */
+ gcc_assert (GET_CODE (t) != CLOBBER_HIGH);
}
/* Get the operand values and constraints out of the insn. */
return x;
case CLOBBER:
+ case CLOBBER_HIGH:
case ASM_OPERANDS:
gcc_assert (insn && DEBUG_INSN_P (insn));
break;
elimination_effects (XEXP (x, 0), mem_mode);
return;
+ case CLOBBER_HIGH:
+ /* CLOBBER_HIGH is only supported for LRA. */
+ return;
+
case SET:
/* Check for setting a register that we know about. */
if (REG_P (SET_DEST (x)))
if (dest == hard_frame_pointer_rtx)
return;
+ /* CLOBBER_HIGH is only supported for LRA. */
+ gcc_assert (GET_CODE (x) != CLOBBER_HIGH);
+
for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
&& (GET_CODE (x) != SET
case PC:
case USE:
case CLOBBER:
+ case CLOBBER_HIGH:
return;
case SUBREG:
to be forgotten later. */
static void
-forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
+forget_old_reloads_1 (rtx x, const_rtx setter,
void *data)
{
unsigned int regno;
if (!REG_P (x))
return;
+ /* CLOBBER_HIGH is only supported for LRA. */
+ gcc_assert (GET_CODE (setter) != CLOBBER_HIGH);
+
regno = REGNO (x);
if (regno >= FIRST_PSEUDO_REGISTER)
while (NOTE_P (insn)
|| (NONJUMP_INSN_P (insn)
&& (GET_CODE (PATTERN (insn)) == USE
- || GET_CODE (PATTERN (insn)) == CLOBBER)))
+ || GET_CODE (PATTERN (insn)) == CLOBBER
+ || GET_CODE (PATTERN (insn)) == CLOBBER_HIGH)))
insn = PREV_INSN (insn);
/* When a target threads its epilogue we might already have a
/* TRIAL must be a CALL_INSN or INSN. Skip USE and CLOBBER. */
if (NONJUMP_INSN_P (trial)
- && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
+ && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
+ || GET_CODE (pat) == CLOBBER_HIGH))
continue;
if (GET_CODE (next_to_match) == GET_CODE (trial)
--insns_to_search;
pat = PATTERN (trial);
- if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
+ if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
+ || GET_CODE (pat) == CLOBBER_HIGH)
continue;
if (GET_CODE (trial) == DEBUG_INSN)
--insns_to_search;
pat = PATTERN (trial);
- if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
+ if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
+ || GET_CODE (pat) == CLOBBER_HIGH)
continue;
if (GET_CODE (trial) == DEBUG_INSN)
|| LABEL_P (insn)
|| (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) != USE
- && GET_CODE (PATTERN (insn)) != CLOBBER))
+ && GET_CODE (PATTERN (insn)) != CLOBBER
+ && GET_CODE (PATTERN (insn)) != CLOBBER_HIGH))
return 0;
return 1;
pat = PATTERN (trial);
/* Stand-alone USE and CLOBBER are just for flow. */
- if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
+ if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
+ || GET_CODE (pat) == CLOBBER_HIGH)
continue;
/* And DEBUG_INSNs never go into delay slots. */
pat = PATTERN (trial);
/* Stand-alone USE and CLOBBER are just for flow. */
- if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
+ if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
+ || GET_CODE (pat) == CLOBBER_HIGH)
continue;
/* And DEBUG_INSNs do not go in delay slots. */
}
pat = PATTERN (trial);
- if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
+ if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
+ || GET_CODE (pat) == CLOBBER_HIGH)
continue;
if (GET_CODE (trial) == DEBUG_INSN)
if (! insn->deleted ()
&& NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) != USE
- && GET_CODE (PATTERN (insn)) != CLOBBER)
+ && GET_CODE (PATTERN (insn)) != CLOBBER
+ && GET_CODE (PATTERN (insn)) != CLOBBER_HIGH)
{
if (GET_CODE (PATTERN (insn)) == SEQUENCE)
{
if (GET_CODE (x) == CLOBBER)
for (i = first_regno; i < last_regno; i++)
CLEAR_HARD_REG_BIT (current_live_regs, i);
+ else if (GET_CODE (x) == CLOBBER_HIGH)
+ /* No current target supports both branch delay slots and CLOBBER_HIGH.
+ We'd need more elaborate liveness tracking to handle that
+ combination. */
+ gcc_unreachable ();
else
for (i = first_regno; i < last_regno; i++)
{
return;
case CLOBBER:
+ case CLOBBER_HIGH:
return;
case CALL_INSN:
for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
link; link = XEXP (link, 1))
- if (GET_CODE (XEXP (link, 0)) == CLOBBER)
- mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
- MARK_SRC_DEST);
+ {
+ /* We could support CLOBBER_HIGH and treat it in the same way as
+ HARD_REGNO_CALL_PART_CLOBBERED, but no port needs that
+ yet. */
+ gcc_assert (GET_CODE (XEXP (link, 0)) != CLOBBER_HIGH);
+ if (GET_CODE (XEXP (link, 0)) == CLOBBER)
+ mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
+ MARK_SRC_DEST);
+ }
/* Check for a REG_SETJMP. If it exists, then we must
assume that this call can clobber any register. */
mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
return;
+ case CLOBBER_HIGH:
+ /* No current target supports both branch delay slots and CLOBBER_HIGH.
+ We'd need more elaborate liveness tracking to handle that
+ combination. */
+ gcc_unreachable ();
+
case SEQUENCE:
{
rtx_sequence *seq = as_a <rtx_sequence *> (x);
return orig;
break;
+ case CLOBBER_HIGH:
+ gcc_assert (REG_P (XEXP (orig, 0)));
+ return orig;
+
case CONST:
if (shared_const_p (orig))
return orig;
return 1;
return 0;
+ case CLOBBER_HIGH:
+ gcc_assert (REG_P (XEXP (body, 0)));
+ return 0;
+
case COND_EXEC:
if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
return 1;
{
struct set_of_data *const data = (struct set_of_data *) (data1);
if (rtx_equal_p (x, data->pat)
- || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
+ || (GET_CODE (pat) == CLOBBER_HIGH
+ && REGNO(data->pat) == REGNO(XEXP (pat, 0))
+ && reg_is_clobbered_by_clobber_high (data->pat, XEXP (pat, 0)))
+ || (GET_CODE (pat) != CLOBBER_HIGH && !MEM_P (x)
+ && reg_overlap_mentioned_p (data->pat, x)))
data->found = pat;
}
{
case USE:
case CLOBBER:
+ case CLOBBER_HIGH:
break;
case SET:
rtx tem = XVECEXP (pat, 0, i);
if (GET_CODE (tem) == USE
- || GET_CODE (tem) == CLOBBER)
+ || GET_CODE (tem) == CLOBBER
+ || GET_CODE (tem) == CLOBBER_HIGH)
continue;
if (GET_CODE (tem) != SET || ! set_noop_p (tem))
if (GET_CODE (x) == COND_EXEC)
x = COND_EXEC_CODE (x);
- if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
+ if (GET_CODE (x) == SET
+ || GET_CODE (x) == CLOBBER
+ || GET_CODE (x) == CLOBBER_HIGH)
{
rtx dest = SET_DEST (x);
while (--i >= 0)
note_reg_use (regno + i);
}
+ else if (ref == CLOBBER_HIGH)
+ {
+ gcc_assert (i == 1);
+ /* We don't know the current state of the register, so have to treat
+ the clobber high as a full clobber. */
+ note_reg_clobber (regno);
+ }
else
{
while (--i >= 0)
else if (ref == USE)
note_reg_use (regno);
else
+ /* For CLOBBER_HIGH, we don't know the current state of the register,
+ so have to treat it as a full clobber. */
note_reg_clobber (regno);
/* Pseudos that are REG_EQUIV to something may be replaced
sub = COND_EXEC_CODE (sub);
code = GET_CODE (sub);
}
- if (code == SET || code == CLOBBER)
+ else if (code == SET || code == CLOBBER || code == CLOBBER_HIGH)
sched_analyze_1 (deps, sub, insn);
else
sched_analyze_2 (deps, sub, insn);
{
if (GET_CODE (XEXP (link, 0)) == CLOBBER)
sched_analyze_1 (deps, XEXP (link, 0), insn);
+ else if (GET_CODE (XEXP (link, 0)) == CLOBBER_HIGH)
+ /* We could support CLOBBER_HIGH and treat it in the same way as
+ HARD_REGNO_CALL_PART_CLOBBERED, but no port needs that yet. */
+ gcc_unreachable ();
else if (GET_CODE (XEXP (link, 0)) != SET)
sched_analyze_2 (deps, XEXP (link, 0), insn);
}