static void force_movables PARAMS ((struct loop_movables *));
static void combine_movables PARAMS ((struct loop_movables *,
struct loop_regs *));
+static int num_unmoved_movables PARAMS ((const struct loop *));
static int regs_match_p PARAMS ((rtx, rtx, struct loop_movables *));
static int rtx_equal_for_loop_p PARAMS ((rtx, rtx, struct loop_movables *,
struct loop_regs *));
static void loop_givs_rescan PARAMS((struct loop *, struct iv_class *,
rtx *));
static void loop_ivs_free PARAMS((struct loop *));
-static void strength_reduce PARAMS ((struct loop *, int, int));
+static void strength_reduce PARAMS ((struct loop *, int));
static void find_single_use_in_loop PARAMS ((struct loop_regs *, rtx, rtx));
static int valid_initial_value_p PARAMS ((rtx, rtx, int, rtx));
static void find_mem_givs PARAMS ((const struct loop *, rtx, rtx, int, int));
static void record_initial PARAMS ((rtx, rtx, void *));
static void update_reg_last_use PARAMS ((rtx, rtx));
static rtx next_insn_in_loop PARAMS ((const struct loop *, rtx));
-static void loop_regs_scan PARAMS ((const struct loop*, int, int *));
+static void loop_regs_scan PARAMS ((const struct loop *, int));
+static int count_insns_in_loop PARAMS ((const struct loop *));
static void load_mems PARAMS ((const struct loop *));
static int insert_loop_mem PARAMS ((rtx *, void *));
static int replace_loop_mem PARAMS ((rtx *, void *));
movables->head = 0;
movables->last = 0;
- movables->num = 0;
/* Determine whether this loop starts with a jump down to a test at
the end. This will occur for a small number of loops with a test
/* Allocate extra space for REGs that might be created by load_mems.
We allocate a little extra slop as well, in the hopes that we
won't have to reallocate the regs array. */
- loop_regs_scan (loop, loop_info->mems_idx + 16, &insn_count);
+ loop_regs_scan (loop, loop_info->mems_idx + 16);
+ insn_count = count_insns_in_loop (loop);
if (loop_dump_stream)
{
/* Recalculate regs->array if load_mems has created new registers. */
if (max_reg_num () > regs->num)
- loop_regs_scan (loop, 0, &insn_count);
+ loop_regs_scan (loop, 0);
for (update_start = loop_start;
PREV_INSN (update_start)
/* Ensure our label doesn't go away. */
LABEL_NUSES (update_end)++;
- strength_reduce (loop, insn_count, flags);
+ strength_reduce (loop, flags);
reg_scan_update (update_start, update_end, loop_max_reg);
loop_max_reg = max_reg_num ();
/* Clean up. */
free (matched_regs);
}
+
+/* Returns the number of movable instructions in LOOP that were not
+ moved outside the loop. */
+
+static int
+num_unmoved_movables (loop)
+ const struct loop *loop;
+{
+ int num = 0;
+ struct movable *m;
+
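+  /* A movable is marked `done' once it has been moved out of the
+     loop or matched to another movable.  */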
+ for (m = LOOP_MOVABLES (loop)->head; m; m = m->next)
+ if (!m->done)
+ ++num;
+
+ return num;
+}
+
\f
/* Return 1 if regs X and Y will become the same if moved. */
rtx *reg_map = (rtx *) xcalloc (nregs, sizeof (rtx));
char *already_moved = (char *) xcalloc (nregs, sizeof (char));
- movables->num = 0;
-
for (m = movables->head; m; m = m->next)
{
/* Describe this movable insn. */
INSN_UID (m->forces->insn));
}
- /* Count movables. Value used in heuristics in strength_reduce. */
- movables->num++;
-
/* Ignore the insn if it's already done (it matched something else).
Otherwise, see if it is now safe to move. */
must check regnos to make sure they are in bounds. */
static void
-strength_reduce (loop, insn_count, flags)
+strength_reduce (loop, flags)
struct loop *loop;
- int insn_count;
int flags;
{
struct loop_info *loop_info = LOOP_INFO (loop);
int reg_map_size;
int unrolled_insn_copies = 0;
rtx test_reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
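+  /* Number of real insns in the loop.  */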
+ int insn_count = count_insns_in_loop (loop);
addr_placeholder = gen_reg_rtx (Pmode);
{
struct induction *v;
- reversible_mem_store
- = (! loop_info->unknown_address_altered
- && ! loop_info->unknown_constant_address_altered
- && ! loop_invariant_p (loop,
- XEXP (XEXP (loop_info->store_mems, 0),
- 0)));
+ /* If we could prove that each of the memory locations
+ written to was different, then we could reverse the
+ store -- but we don't presently have any way of
+ knowing that. */
+ reversible_mem_store = 0;
/* If the store depends on a register that is set after the
store, it depends on the initial value, and is thus not
&& ! loop_info->has_volatile
&& reversible_mem_store
&& (bl->giv_count + bl->biv_count + loop_info->num_mem_sets
- + LOOP_MOVABLES (loop)->num + compare_and_branch == insn_count)
+ + num_unmoved_movables (loop) + compare_and_branch == insn_count)
&& (bl == ivs->list && bl->next == 0))
|| no_use_except_counting)
{
parameter may be zero, in which case this processing is not done.
Set REGS->ARRAY[I].MAY_NOT_OPTIMIZE nonzero if we should not
- optimize register I.
-
- Store in *COUNT_PTR the number of actual instructions
- in the loop. We use this to decide what is worth moving out. */
+ optimize register I. */
static void
-loop_regs_scan (loop, extra_size, count_ptr)
+loop_regs_scan (loop, extra_size)
const struct loop *loop;
int extra_size;
- int *count_ptr;
{
struct loop_regs *regs = LOOP_REGS (loop);
int old_nregs;
basic block. In that case, it is the insn that last set reg n. */
rtx *last_set;
rtx insn;
- int count = 0;
int i;
old_nregs = regs->num;
{
if (INSN_P (insn))
{
- ++count;
-
/* Record registers that have exactly one use. */
find_single_use_in_loop (regs, insn, PATTERN (insn));
regs->array[i].n_times_set = regs->array[i].set_in_loop;
free (last_set);
- *count_ptr = count;
}
+/* Returns the number of real INSNs in the LOOP. */
+
+static int
+count_insns_in_loop (loop)
+ const struct loop *loop;
+{
+ int count = 0;
+ rtx insn;
+
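+  /* Walk from the loop's top (or its start, when there is no separate
+     top) to its end, counting only the insns that INSN_P regards as
+     real.  */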
+ for (insn = loop->top ? loop->top : loop->start; insn != loop->end;
+ insn = NEXT_INSN (insn))
+ if (INSN_P (insn))
+ ++count;
+
+ return count;
+}
+
/* Move MEMs into registers for the duration of the loop. */