* config/c6x/eqf.c: New file.
* config/c6x/libgcc-c6xeabi.ver: New file.
+ Revert
+ 2003-07-10  Eric Botcazou  <ebotcazou@libertysurf.fr>
+ PR rtl-optimization/11320
+ * sched-int.h (struct deps) [reg_conditional_sets]: New field.
+ (struct sched_info) [compute_jump_reg_dependencies]: New prototype.
+ * sched-deps.c (sched_analyze_insn) [JUMP_INSN]: Update call to
+ current_sched_info->compute_jump_reg_dependencies. Record which
+ registers are used and which registers are set by the jump.
+ Clear deps->reg_conditional_sets after a barrier.
+ Set deps->reg_conditional_sets if the insn is a COND_EXEC.
+ Clear deps->reg_conditional_sets if the insn is not a COND_EXEC.
+ (init_deps): Initialize reg_conditional_sets.
+ (free_deps): Clear reg_conditional_sets.
+ * sched-ebb.c (compute_jump_reg_dependencies): New prototype.
+ Mark registers live on entry of the fallthrough block and conditionally
+ set as set by the jump. Mark registers live on entry of non-fallthrough
+ blocks as used by the jump.
+ * sched-rgn.c (compute_jump_reg_dependencies): New prototype.
+ Mark new parameters as unused.
+
2011-07-14  Andrew Pinski  <pinskia@gmail.com>
PR tree-opt/49309
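
As a rough, illustrative sketch (not part of the patch and not compiled against the scheduler headers; the toy_* names and the placeholder typedefs are invented), this is the shape of the hook the revert restores: a single regset into which the implementation stores the registers the jump must be considered to use.

#include <stddef.h>

/* Placeholder types standing in for GCC's rtx and regset.  */
typedef void *rtx;
typedef void *regset;

/* Restored hook shape: the jump plus one regset of used registers.  */
struct toy_sched_deps_info
{
  void (*compute_jump_reg_dependencies) (rtx, regset);
};

/* A do-nothing implementation, analogous to the sched-rgn.c stub below.  */
static void
toy_compute_jump_reg_dependencies (rtx insn, regset used)
{
  (void) insn;
  (void) used;
}

static struct toy_sched_deps_info toy_info
  = { toy_compute_jump_reg_dependencies };

int
main (void)
{
  /* Call through the hook, mirroring how sched-deps.c invokes it.  */
  toy_info.compute_jump_reg_dependencies (NULL, NULL);
  return 0;
}
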
static void
compute_jump_reg_dependencies (rtx insn ATTRIBUTE_UNUSED,
- regset cond_exec ATTRIBUTE_UNUSED,
- regset used ATTRIBUTE_UNUSED,
- regset set ATTRIBUTE_UNUSED)
+ regset used ATTRIBUTE_UNUSED)
{
}
(rev1==rev2
? reversed_comparison_code (cond2, NULL)
: GET_CODE (cond2))
- && XEXP (cond1, 0) == XEXP (cond2, 0)
+ && rtx_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
&& XEXP (cond1, 1) == XEXP (cond2, 1))
return 1;
return 0;
if (sched_deps_info->compute_jump_reg_dependencies)
{
- regset_head tmp_uses, tmp_sets;
- INIT_REG_SET (&tmp_uses);
- INIT_REG_SET (&tmp_sets);
+ regset_head tmp;
+ INIT_REG_SET (&tmp);
+
+ (*sched_deps_info->compute_jump_reg_dependencies) (insn, &tmp);
- (*sched_deps_info->compute_jump_reg_dependencies)
- (insn, &deps->reg_conditional_sets, &tmp_uses, &tmp_sets);
/* Make latency of jump equal to 0 by using anti-dependence. */
- EXECUTE_IF_SET_IN_REG_SET (&tmp_uses, 0, i, rsi)
+ EXECUTE_IF_SET_IN_REG_SET (&tmp, 0, i, rsi)
{
struct deps_reg *reg_last = &deps->reg_last[i];
add_dependence_list (insn, reg_last->sets, 0, REG_DEP_ANTI);
reg_last->uses = alloc_INSN_LIST (insn, reg_last->uses);
}
}
- IOR_REG_SET (reg_pending_sets, &tmp_sets);
- CLEAR_REG_SET (&tmp_uses);
- CLEAR_REG_SET (&tmp_sets);
+ CLEAR_REG_SET (&tmp);
}
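
The comment above ("Make latency of jump equal to 0 by using anti-dependence") is the key point of this hunk: the registers the jump reads still order it after their last setters, but because the edge is recorded as an anti-dependence the scheduler does not charge the setter's latency. A minimal standalone sketch of that costing idea (toy types and numbers, not GCC's dependence-cost code):

#include <stdio.h>

/* Toy model, not GCC code: why an anti-dependence gives the jump latency 0.  */
enum dep_kind { DEP_TRUE, DEP_ANTI };

/* Cycles the consumer must wait after the producer issues: a true dependence
   waits out the producer's latency, an anti-dependence only enforces order.  */
static int
toy_dep_cost (int producer_latency, enum dep_kind kind)
{
  return kind == DEP_ANTI ? 0 : producer_latency;
}

int
main (void)
{
  int setter_latency = 2;  /* e.g. the insn that sets the condition register */

  printf ("jump ordered by true dep: wait %d cycles\n",
	  toy_dep_cost (setter_latency, DEP_TRUE));
  printf ("jump ordered by anti dep: wait %d cycles\n",
	  toy_dep_cost (setter_latency, DEP_ANTI));
  return 0;
}
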
/* All memory writes and volatile reads must happen before the
add_dependence_list (insn, reg_last->uses, 0, REG_DEP_ANTI);
if (!deps->readonly)
- {
- reg_last->sets = alloc_INSN_LIST (insn, reg_last->sets);
- SET_REGNO_REG_SET (&deps->reg_conditional_sets, i);
- }
+ reg_last->sets = alloc_INSN_LIST (insn, reg_last->sets);
}
}
else
reg_last->sets = alloc_INSN_LIST (insn, reg_last->sets);
reg_last->uses_length = 0;
reg_last->clobbers_length = 0;
- CLEAR_REGNO_REG_SET (&deps->reg_conditional_sets, i);
}
}
}
&& sel_insn_is_speculation_check (insn)))
flush_pending_lists (deps, insn, true, true);
- if (!deps->readonly)
- CLEAR_REG_SET (&deps->reg_conditional_sets);
reg_pending_barrier = NOT_A_BARRIER;
}
else
deps->reg_last = XCNEWVEC (struct deps_reg, max_reg);
INIT_REG_SET (&deps->reg_last_in_use);
- INIT_REG_SET (&deps->reg_conditional_sets);
deps->pending_read_insns = 0;
deps->pending_read_mems = 0;
free_INSN_LIST_list (&reg_last->clobbers);
}
CLEAR_REG_SET (&deps->reg_last_in_use);
- CLEAR_REG_SET (&deps->reg_conditional_sets);
/* As we initialize reg_last lazily, it is possible that we didn't allocate
it at all. */
deps = NULL;
}
-/* Remove INSN from dependence contexts DEPS. Caution: reg_conditional_sets
- is not handled. */
+/* Remove INSN from dependence contexts DEPS. */
void
remove_from_deps (struct deps_desc *deps, rtx insn)
{
return 1;
}
- /* INSN is a JUMP_INSN, COND_SET is the set of registers that are
- conditionally set before INSN. Store the set of registers that
- must be considered as used by this jump in USED and that of
- registers that must be considered as set in SET. */
+ /* INSN is a JUMP_INSN. Store the set of registers that
+ must be considered as used by this jump in USED. */
void
-ebb_compute_jump_reg_dependencies (rtx insn, regset cond_set, regset used,
- regset set)
+ebb_compute_jump_reg_dependencies (rtx insn, regset used)
{
basic_block b = BLOCK_FOR_INSN (insn);
edge e;
edge_iterator ei;
FOR_EACH_EDGE (e, ei, b->succs)
- if (e->flags & EDGE_FALLTHRU)
- /* The jump may be a by-product of a branch that has been merged
- in the main codepath after being conditionalized. Therefore
- it may guard the fallthrough block from using a value that has
- conditionally overwritten that of the main codepath. So we
- consider that it restores the value of the main codepath. */
- bitmap_and (set, df_get_live_in (e->dest), cond_set);
- else
+ if ((e->flags & EDGE_FALLTHRU) == 0)
bitmap_ior_into (used, df_get_live_in (e->dest));
}
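
After the revert, ebb_compute_jump_reg_dependencies simply accumulates into USED the registers live on entry to every non-fallthrough successor of the jump's block. A standalone sketch of that computation, using invented toy types (64-bit masks instead of regsets, a flag instead of EDGE_FALLTHRU):

#include <stdio.h>
#include <stdint.h>
#include <stdbool.h>

/* Toy model, not GCC code: a successor edge carries the destination block's
   live-in registers (as a bit mask) and whether the edge is the fallthrough.  */
struct toy_edge
{
  uint64_t dest_live_in;
  bool fallthru;
};

/* Registers the jump must be considered to use: the union of the live-in
   sets of all non-fallthrough successors.  */
static uint64_t
toy_jump_used_regs (const struct toy_edge *succs, int n_succs)
{
  uint64_t used = 0;
  int i;

  for (i = 0; i < n_succs; i++)
    if (!succs[i].fallthru)
      used |= succs[i].dest_live_in;
  return used;
}

int
main (void)
{
  struct toy_edge succs[2] = {
    { (1u << 1) | (1u << 2), true },   /* fallthrough block: r1, r2 live */
    { (1u << 2) | (1u << 5), false }   /* branch target: r2, r5 live */
  };

  printf ("used by jump: %#llx\n",
	  (unsigned long long) toy_jump_used_regs (succs, 2));
  return 0;
}
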
extern int max_issue (struct ready_list *, int, state_t, bool, int *);
-extern void ebb_compute_jump_reg_dependencies (rtx, regset, regset, regset);
+extern void ebb_compute_jump_reg_dependencies (rtx, regset);
extern edge find_fallthru_edge_from (basic_block);
in reg_last[N].{uses,sets,clobbers}. */
regset_head reg_last_in_use;
- /* Element N is set for each register that is conditionally set. */
- regset_head reg_conditional_sets;
-
/* Shows the last value of reg_pending_barrier associated with the insn. */
enum reg_pending_barrier_mode last_reg_pending_barrier;
/* Called when computing dependencies for a JUMP_INSN. This function
should store the set of registers that must be considered as used by
the jump in the regset. */
- void (*compute_jump_reg_dependencies) (rtx, regset, regset, regset);
+ void (*compute_jump_reg_dependencies) (rtx, regset);
/* Start analyzing insn. */
void (*start_insn) (rtx);
static int schedule_more_p (void);
static const char *rgn_print_insn (const_rtx, int);
static int rgn_rank (rtx, rtx);
-static void compute_jump_reg_dependencies (rtx, regset, regset, regset);
+static void compute_jump_reg_dependencies (rtx, regset);
/* Functions for speculative scheduling. */
static void rgn_add_remove_insn (rtx, int);
return BLOCK_TO_BB (BLOCK_NUM (next)) == BLOCK_TO_BB (BLOCK_NUM (insn));
}
-/* INSN is a JUMP_INSN, COND_SET is the set of registers that are
- conditionally set before INSN. Store the set of registers that
- must be considered as used by this jump in USED and that of
- registers that must be considered as set in SET. */
+/* INSN is a JUMP_INSN. Store the set of registers that must be
+ considered as used by this jump in USED. */
static void
compute_jump_reg_dependencies (rtx insn ATTRIBUTE_UNUSED,
- regset cond_exec ATTRIBUTE_UNUSED,
- regset used ATTRIBUTE_UNUSED,
- regset set ATTRIBUTE_UNUSED)
+ regset used ATTRIBUTE_UNUSED)
{
/* Nothing to do here, since we postprocess jumps in
add_branch_dependences. */