+2004-05-19 Zdenek Dvorak <rakdver@atrey.karlin.mff.cuni.cz>
+
+ PR c++/15463
+ * loop-iv.c (iv_number_of_iterations): Use trunc_int_for_mode on
+ the result of inverse.
+
+ PR rtl-optimization/15274
+ * loop-iv.c (determine_max_iter, shorten_into_mode,
+ iv_number_of_iterations): Handle constants correctly.
+ * rtl.h (get_mode_bounds): Declaration changed.
+ * stor-layout.c (get_mode_bounds): Return a constant suitable for
+ the target mode.
+
+ PR rtl-optimization/14692
+ * loop-unswitch.c (may_unswitch_on): Try folding the result.
+ (unswitch_single_loop): Work correctly when may_unswitch_on
+ returns a folded constant.
+
+ * loop-iv.c (implies_p): Handle A < B ==> A + 1 <= B.
+ * simplify-rtx.c (simplify_const_relational_operation): Optimize
+ comparisons with mode bounds.
+
+ * function.c (struct temp_slot): Add new field prev.
+ (free_after_compilation, init_temp_slots): Clear the new fields.
+ (cut_slot_from_list, insert_slot_to_list,
+ temp_slots_at_level, max_slot_level, move_slot_to_level,
+ make_slot_available): New functions.
+ (assign_stack_temp_for_type, combine_temp_slots,
+ find_temp_slot_from_address, preserve_temp_slots,
+ preserve_rtl_expr_result, free_temp_slots,
+ free_temps_for_rtl_expr, pop_temp_slots): Work with
+ the new structure of lists.
+ (mark_all_temps_used): Removed.
+ * function.h (struct function): Field x_temp_slots
+ replaced by x_used_temp_slots and x_avail_temp_slots.
+ (temp_slots): Replaced by ...
+ (used_temp_slots, avail_temp_slots): New.
+ * tree.h (mark_all_temps_used): Declaration removed.
+
+ * loop-iv.c (mark_single_set, get_biv_step_1, iv_analyze,
+ simplify_using_assignment): Take the expression out of
+ the expr_list wrapper.
+
+ * loop-iv.c (iv_number_of_iterations): Improve classification of
+ infinite loops.
+
2004-05-19 Roger Sayle <roger@eyesopen.com>
* doc/tm.texi (TARGET_RTX_COSTS): Document that instruction
{
/* Points to next temporary slot. */
struct temp_slot *next;
+ /* Points to previous temporary slot. */
+ struct temp_slot *prev;
+
/* The rtx used to reference the slot. */
rtx slot;
/* The rtx used to represent the address if not the address of the
f->varasm = NULL;
f->machine = NULL;
- f->x_temp_slots = NULL;
+ f->x_avail_temp_slots = NULL;
+ f->x_used_temp_slots = NULL;
f->arg_offset_rtx = NULL;
f->return_rtx = NULL;
f->internal_arg_pointer = NULL;
{
return assign_stack_local_1 (mode, size, align, cfun);
}
+
+\f
+/* Removes temporary slot TEMP from LIST. */
+
+static void
+cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
+{
+ if (temp->next)
+ temp->next->prev = temp->prev;
+ if (temp->prev)
+ temp->prev->next = temp->next;
+ else
+ *list = temp->next;
+
+ temp->prev = temp->next = NULL;
+}
+
+/* Inserts temporary slot TEMP into LIST. */
+
+static void
+insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
+{
+ temp->next = *list;
+ if (*list)
+ (*list)->prev = temp;
+ temp->prev = NULL;
+ *list = temp;
+}
+
+/* Returns the list of used temp slots at LEVEL. */
+
+static struct temp_slot **
+temp_slots_at_level (int level)
+{
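+ /* Levels can go down to -1 (preserve_temp_slots moves slots to
+ temp_slot_level - 1), so bias the array index by one. */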
+ level++;
+
+ if (!used_temp_slots)
+ VARRAY_GENERIC_PTR_INIT (used_temp_slots, 3, "used_temp_slots");
+
+ while (level >= (int) VARRAY_ACTIVE_SIZE (used_temp_slots))
+ VARRAY_PUSH_GENERIC_PTR (used_temp_slots, NULL);
+
+ return (struct temp_slot **) &VARRAY_GENERIC_PTR (used_temp_slots, level);
+}
+
+/* Returns the maximal temporary slot level. */
+
+static int
+max_slot_level (void)
+{
+ if (!used_temp_slots)
+ return -1;
+
+ return VARRAY_ACTIVE_SIZE (used_temp_slots) - 1;
+}
+
+/* Moves temporary slot TEMP to LEVEL. */
+
+static void
+move_slot_to_level (struct temp_slot *temp, int level)
+{
+ cut_slot_from_list (temp, temp_slots_at_level (temp->level));
+ insert_slot_to_list (temp, temp_slots_at_level (level));
+ temp->level = level;
+}
+
+/* Makes temporary slot TEMP available. */
+
+static void
+make_slot_available (struct temp_slot *temp)
+{
+ cut_slot_from_list (temp, temp_slots_at_level (temp->level));
+ insert_slot_to_list (temp, &avail_temp_slots);
+ temp->in_use = 0;
+ temp->level = -1;
+}
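A minimal, self-contained sketch of the list discipline implemented by the
helpers above, in plain C (illustrative only, not part of the patch);
`slot', `cut_slot' and `insert_slot' are stand-ins for temp_slot,
cut_slot_from_list and insert_slot_to_list.  Slots in use live on
per-level lists, and freeing a slot means unlinking it and pushing it
onto a single available list:

    #include <stdio.h>
    #include <stddef.h>

    struct slot
    {
      struct slot *next, *prev;
    };

    static void
    cut_slot (struct slot *s, struct slot **list)
    {
      if (s->next)
        s->next->prev = s->prev;
      if (s->prev)
        s->prev->next = s->next;
      else
        *list = s->next;
      s->prev = s->next = NULL;
    }

    static void
    insert_slot (struct slot *s, struct slot **list)
    {
      s->next = *list;
      if (*list)
        (*list)->prev = s;
      s->prev = NULL;
      *list = s;
    }

    int
    main (void)
    {
      struct slot a = { NULL, NULL }, b = { NULL, NULL };
      struct slot *used = NULL, *avail = NULL;

      insert_slot (&a, &used);
      insert_slot (&b, &used);   /* used: b, a */
      cut_slot (&a, &used);      /* used: b */
      insert_slot (&a, &avail);  /* avail: a */

      printf ("used head: %s, avail head: %s\n",
              used == &b ? "b" : "?", avail == &a ? "a" : "?");
      return 0;
    }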
\f
/* Allocate a temporary stack slot and record it for possible later
reuse.
tree type)
{
unsigned int align;
- struct temp_slot *p, *best_p = 0;
+ struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
rtx slot;
/* If SIZE is -1 it means that somebody tried to allocate a temporary
/* Try to find an available, already-allocated temporary of the proper
mode which meets the size and alignment requirements. Choose the
smallest one with the closest alignment. */
- for (p = temp_slots; p; p = p->next)
- if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
- && ! p->in_use
- && objects_must_conflict_p (p->type, type)
- && (best_p == 0 || best_p->size > p->size
- || (best_p->size == p->size && best_p->align > p->align)))
- {
- if (p->align == align && p->size == size)
- {
- best_p = 0;
- break;
- }
- best_p = p;
- }
+ for (p = avail_temp_slots; p; p = p->next)
+ {
+ if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
+ && objects_must_conflict_p (p->type, type)
+ && (best_p == 0 || best_p->size > p->size
+ || (best_p->size == p->size && best_p->align > p->align)))
+ {
+ if (p->align == align && p->size == size)
+ {
+ selected = p;
+ cut_slot_from_list (selected, &avail_temp_slots);
+ best_p = 0;
+ break;
+ }
+ best_p = p;
+ }
+ }
/* Make our best, if any, the one to use. */
if (best_p)
{
+ selected = best_p;
+ cut_slot_from_list (selected, &avail_temp_slots);
+
/* If there are enough aligned bytes left over, make them into a new
temp_slot so that the extra bytes don't get wasted. Do this only
for BLKmode slots, so that we can be sure of the alignment. */
p->address = 0;
p->rtl_expr = 0;
p->type = best_p->type;
- p->next = temp_slots;
- temp_slots = p;
+ insert_slot_to_list (p, &avail_temp_slots);
stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
stack_slot_list);
best_p->full_size = rounded_size;
}
}
-
- p = best_p;
}
/* If we still didn't find one, make a new temporary. */
- if (p == 0)
+ if (selected == 0)
{
HOST_WIDE_INT frame_offset_old = frame_offset;
p->full_size = frame_offset - frame_offset_old;
#endif
p->address = 0;
- p->next = temp_slots;
- temp_slots = p;
+
+ selected = p;
}
+ p = selected;
p->in_use = 1;
p->addr_taken = 0;
p->rtl_expr = seq_rtl_expr;
p->keep = keep;
}
+ pp = temp_slots_at_level (p->level);
+ insert_slot_to_list (p, pp);
/* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
void
combine_temp_slots (void)
{
- struct temp_slot *p, *q;
- struct temp_slot *prev_p, *prev_q;
+ struct temp_slot *p, *q, *next, *next_q;
int num_slots;
/* We can't combine slots, because the information about which slot
/* If there are a lot of temp slots, don't do anything unless
high levels of optimization. */
if (! flag_expensive_optimizations)
- for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
+ for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
if (num_slots > 100 || (num_slots > 10 && optimize == 0))
return;
- for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
+ for (p = avail_temp_slots; p; p = next)
{
int delete_p = 0;
- if (! p->in_use && GET_MODE (p->slot) == BLKmode)
- for (q = p->next, prev_q = p; q; q = prev_q->next)
- {
- int delete_q = 0;
- if (! q->in_use && GET_MODE (q->slot) == BLKmode)
- {
- if (p->base_offset + p->full_size == q->base_offset)
- {
- /* Q comes after P; combine Q into P. */
- p->size += q->size;
- p->full_size += q->full_size;
- delete_q = 1;
- }
- else if (q->base_offset + q->full_size == p->base_offset)
- {
- /* P comes after Q; combine P into Q. */
- q->size += p->size;
- q->full_size += p->full_size;
- delete_p = 1;
- break;
- }
- }
- /* Either delete Q or advance past it. */
- if (delete_q)
- prev_q->next = q->next;
- else
- prev_q = q;
- }
- /* Either delete P or advance past it. */
- if (delete_p)
+ next = p->next;
+
+ if (GET_MODE (p->slot) != BLKmode)
+ continue;
+
+ for (q = p->next; q; q = next_q)
{
- if (prev_p)
- prev_p->next = p->next;
- else
- temp_slots = p->next;
+ int delete_q = 0;
+
+ next_q = q->next;
+
+ if (GET_MODE (q->slot) != BLKmode)
+ continue;
+
+ if (p->base_offset + p->full_size == q->base_offset)
+ {
+ /* Q comes after P; combine Q into P. */
+ p->size += q->size;
+ p->full_size += q->full_size;
+ delete_q = 1;
+ }
+ else if (q->base_offset + q->full_size == p->base_offset)
+ {
+ /* P comes after Q; combine P into Q. */
+ q->size += p->size;
+ q->full_size += p->full_size;
+ delete_p = 1;
+ break;
+ }
+ if (delete_q)
+ cut_slot_from_list (q, &avail_temp_slots);
}
- else
- prev_p = p;
+
+ /* Either delete P or advance past it. */
+ if (delete_p)
+ cut_slot_from_list (p, &avail_temp_slots);
}
}
\f
{
struct temp_slot *p;
rtx next;
+ int i;
- for (p = temp_slots; p; p = p->next)
- {
- if (! p->in_use)
- continue;
-
- else if (XEXP (p->slot, 0) == x
- || p->address == x
- || (GET_CODE (x) == PLUS
- && XEXP (x, 0) == virtual_stack_vars_rtx
- && GET_CODE (XEXP (x, 1)) == CONST_INT
- && INTVAL (XEXP (x, 1)) >= p->base_offset
- && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
- return p;
-
- else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
- for (next = p->address; next; next = XEXP (next, 1))
- if (XEXP (next, 0) == x)
- return p;
- }
+ for (i = max_slot_level (); i >= 0; i--)
+ for (p = *temp_slots_at_level (i); p; p = p->next)
+ {
+ if (XEXP (p->slot, 0) == x
+ || p->address == x
+ || (GET_CODE (x) == PLUS
+ && XEXP (x, 0) == virtual_stack_vars_rtx
+ && GET_CODE (XEXP (x, 1)) == CONST_INT
+ && INTVAL (XEXP (x, 1)) >= p->base_offset
+ && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
+ return p;
+
+ else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
+ for (next = p->address; next; next = XEXP (next, 1))
+ if (XEXP (next, 0) == x)
+ return p;
+ }
/* If we have a sum involving a register, see if it points to a temp
slot. */
void
preserve_temp_slots (rtx x)
{
- struct temp_slot *p = 0;
+ struct temp_slot *p = 0, *next;
/* If there is no result, we still might have some objects whose address
were taken, so we need to make sure they stay around. */
if (x == 0)
{
- for (p = temp_slots; p; p = p->next)
- if (p->in_use && p->level == temp_slot_level && p->addr_taken)
- p->level--;
+ for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
+ {
+ next = p->next;
+
+ if (p->addr_taken)
+ move_slot_to_level (p, temp_slot_level - 1);
+ }
return;
}
taken. */
if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
{
- for (p = temp_slots; p; p = p->next)
- if (p->in_use && p->level == temp_slot_level && p->addr_taken)
- p->level--;
+ for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
+ {
+ next = p->next;
+
+ if (p->addr_taken)
+ move_slot_to_level (p, temp_slot_level - 1);
+ }
return;
}
if (p->level == temp_slot_level)
{
- for (q = temp_slots; q; q = q->next)
- if (q != p && q->addr_taken && q->level == p->level)
- q->level--;
+ for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
+ {
+ next = q->next;
- p->level--;
+ if (p != q && q->addr_taken)
+ move_slot_to_level (q, temp_slot_level - 1);
+ }
+
+ move_slot_to_level (p, temp_slot_level - 1);
p->addr_taken = 0;
}
return;
}
/* Otherwise, preserve all non-kept slots at this level. */
- for (p = temp_slots; p; p = p->next)
- if (p->in_use && p->level == temp_slot_level && ! p->keep)
- p->level--;
+ for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
+ {
+ next = p->next;
+
+ if (!p->keep)
+ move_slot_to_level (p, temp_slot_level - 1);
+ }
}
/* X is the result of an RTL_EXPR. If it is a temporary slot associated
p = find_temp_slot_from_address (XEXP (x, 0));
if (p != 0)
{
- p->level = MIN (p->level, temp_slot_level);
+ move_slot_to_level (p, MIN (p->level, temp_slot_level));
p->rtl_expr = 0;
}
void
free_temp_slots (void)
{
- struct temp_slot *p;
+ struct temp_slot *p, *next;
+
+ for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
+ {
+ next = p->next;
- for (p = temp_slots; p; p = p->next)
- if (p->in_use && p->level == temp_slot_level && ! p->keep
- && p->rtl_expr == 0)
- p->in_use = 0;
+ if (!p->keep && p->rtl_expr == 0)
+ make_slot_available (p);
+ }
combine_temp_slots ();
}
void
free_temps_for_rtl_expr (tree t)
{
- struct temp_slot *p;
-
- for (p = temp_slots; p; p = p->next)
- if (p->rtl_expr == t)
- {
- /* If this slot is below the current TEMP_SLOT_LEVEL, then it
- needs to be preserved. This can happen if a temporary in
- the RTL_EXPR was addressed; preserve_temp_slots will move
- the temporary into a higher level. */
- if (temp_slot_level <= p->level)
- p->in_use = 0;
- else
- p->rtl_expr = NULL_TREE;
- }
-
- combine_temp_slots ();
-}
+ struct temp_slot *p, *next;
-/* Mark all temporaries ever allocated in this function as not suitable
- for reuse until the current level is exited. */
-
-void
-mark_all_temps_used (void)
-{
- struct temp_slot *p;
-
- for (p = temp_slots; p; p = p->next)
+ for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
{
- p->in_use = p->keep = 1;
- p->level = MIN (p->level, temp_slot_level);
+ next = p->next;
+
+ if (p->rtl_expr == t)
+ {
+ /* If this slot is below the current TEMP_SLOT_LEVEL, then it
+ needs to be preserved. This can happen if a temporary in
+ the RTL_EXPR was addressed; preserve_temp_slots will move
+ the temporary into a higher level. */
+ if (temp_slot_level <= p->level)
+ make_slot_available (p);
+ else
+ p->rtl_expr = NULL_TREE;
+ }
}
+
+ combine_temp_slots ();
}
/* Push deeper into the nesting level for stack temporaries. */
void
pop_temp_slots (void)
{
- struct temp_slot *p;
+ struct temp_slot *p, *next;
- for (p = temp_slots; p; p = p->next)
- if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
- p->in_use = 0;
+ for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
+ {
+ next = p->next;
+
+ if (p->rtl_expr == 0)
+ make_slot_available (p);
+ }
combine_temp_slots ();
init_temp_slots (void)
{
/* We have not allocated any temporaries yet. */
- temp_slots = 0;
+ avail_temp_slots = 0;
+ used_temp_slots = 0;
temp_slot_level = 0;
var_temp_slot_level = 0;
target_temp_slot_level = 0;
element in this vector is one less than MAX_PARM_REG, above. */
rtx * GTY ((length ("%h.x_max_parm_reg"))) x_parm_reg_stack_loc;
- /* List of all temporaries allocated, both available and in use. */
- struct temp_slot *x_temp_slots;
+ /* Lists of all temporaries allocated and in use, indexed by level. */
+ struct varray_head_tag * GTY((param_is (struct temp_slot))) x_used_temp_slots;
+
+ /* List of available temp slots. */
+ struct temp_slot *x_avail_temp_slots;
/* Current nesting level for temporaries. */
int x_temp_slot_level;
#define rtl_expr_chain (cfun->x_rtl_expr_chain)
#define last_parm_insn (cfun->x_last_parm_insn)
#define function_call_count (cfun->x_function_call_count)
-#define temp_slots (cfun->x_temp_slots)
+#define used_temp_slots (cfun->x_used_temp_slots)
+#define avail_temp_slots (cfun->x_avail_temp_slots)
#define temp_slot_level (cfun->x_temp_slot_level)
#define target_temp_slot_level (cfun->x_target_temp_slot_level)
#define var_temp_slot_level (cfun->x_var_temp_slot_level)
unsigned regno, uid;
src = find_reg_equal_equiv_note (insn);
- if (!src)
+ if (src)
+ src = XEXP (src, 0);
+ else
src = SET_SRC (set);
if (!simple_set_p (SET_DEST (set), src))
set = single_set (insn);
rhs = find_reg_equal_equiv_note (insn);
- if (!rhs)
+ if (rhs)
+ rhs = XEXP (rhs, 0);
+ else
rhs = SET_SRC (set);
lhs = SET_DEST (set);
set = single_set (insn);
rhs = find_reg_equal_equiv_note (insn);
- if (!rhs)
+ if (rhs)
+ rhs = XEXP (rhs, 0);
+ else
rhs = SET_SRC (set);
code = GET_CODE (rhs);
}
}
- get_mode_bounds (desc->mode, desc->signed_p, &mmin, &mmax);
+ get_mode_bounds (desc->mode, desc->signed_p, desc->mode, &mmin, &mmax);
nmax = INTVAL (mmax) - INTVAL (mmin);
if (GET_CODE (niter) == UDIV)
return;
rhs = find_reg_equal_equiv_note (insn);
- if (!rhs)
+ if (rhs)
+ rhs = XEXP (rhs, 0);
+ else
rhs = SET_SRC (set);
if (!simple_rhs_p (rhs))
static bool
implies_p (rtx a, rtx b)
{
- rtx op0, op1, r;
+ rtx op0, op1, opb0, opb1, r;
+ enum machine_mode mode;
if (GET_CODE (a) == EQ)
{
}
}
+ /* A < B implies A + 1 <= B. */
+ if ((GET_CODE (a) == GT || GET_CODE (a) == LT)
+ && (GET_CODE (b) == GE || GET_CODE (b) == LE))
+ {
+ op0 = XEXP (a, 0);
+ op1 = XEXP (a, 1);
+ opb0 = XEXP (b, 0);
+ opb1 = XEXP (b, 1);
+
+ if (GET_CODE (a) == GT)
+ {
+ r = op0;
+ op0 = op1;
+ op1 = r;
+ }
+
+ if (GET_CODE (b) == GE)
+ {
+ r = opb0;
+ opb0 = opb1;
+ opb1 = r;
+ }
+
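+ /* The subtraction below needs a mode; take it from OP0/OPB0,
+ falling back to OP1/OPB1 when those are mode-less constants. */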
+ mode = GET_MODE (op0);
+ if (mode != GET_MODE (opb0))
+ mode = VOIDmode;
+ else if (mode == VOIDmode)
+ {
+ mode = GET_MODE (op1);
+ if (mode != GET_MODE (opb1))
+ mode = VOIDmode;
+ }
+
+ if (mode != VOIDmode
+ && rtx_equal_p (op1, opb1)
+ && simplify_gen_binary (MINUS, mode, opb0, op0) == const1_rtx)
+ return true;
+ }
+
return false;
}
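The new rule in implies_p can be sanity-checked outside the compiler.
The sketch below (plain C, illustrative only, not part of the patch)
brute-forces the arithmetic fact behind the symbolic test above (equal
right-hand operands, and OPB0 - OP0 simplifying to const1_rtx):

    #include <stdio.h>

    int
    main (void)
    {
      int a, b, fail = 0;

      /* Exhaustively check A < B ==> A + 1 <= B over 8-bit signed
         values; A + 1 cannot overflow because A < B <= 127.  */
      for (a = -128; a <= 127; a++)
        for (b = -128; b <= 127; b++)
          if (a < b && !(a + 1 <= b))
            fail = 1;

      printf (fail ? "counterexample found\n"
                   : "A < B ==> A + 1 <= B holds\n");
      return 0;
    }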
{
rtx mmin, mmax, cond_over, cond_under;
- get_mode_bounds (mode, signed_p, &mmin, &mmax);
+ get_mode_bounds (mode, signed_p, iv->extend_mode, &mmin, &mmax);
cond_under = simplify_gen_relational (LT, SImode, iv->extend_mode,
iv->base, mmin);
cond_over = simplify_gen_relational (GT, SImode, iv->extend_mode,
{
rtx op0, op1, delta, step, bound, may_xform, def_insn, tmp, tmp0, tmp1;
struct rtx_iv iv0, iv1, tmp_iv;
- rtx assumption;
+ rtx assumption, may_not_xform;
enum rtx_code cond;
enum machine_mode mode, comp_mode;
- rtx mmin, mmax;
- unsigned HOST_WIDEST_INT s, size, d;
+ rtx mmin, mmax, mode_mmin, mode_mmax;
+ unsigned HOST_WIDEST_INT s, size, d, inv;
HOST_WIDEST_INT up, down, inc;
int was_sharp = false;
comp_mode = iv0.extend_mode;
mode = iv0.mode;
size = GET_MODE_BITSIZE (mode);
- get_mode_bounds (mode, (cond == LE || cond == LT), &mmin, &mmax);
+ get_mode_bounds (mode, (cond == LE || cond == LT), comp_mode, &mmin, &mmax);
+ mode_mmin = lowpart_subreg (mode, mmin, comp_mode);
+ mode_mmax = lowpart_subreg (mode, mmax, comp_mode);
if (GET_CODE (iv0.step) != CONST_INT || GET_CODE (iv1.step) != CONST_INT)
goto fail;
if (iv0.step == const0_rtx)
{
tmp = lowpart_subreg (mode, iv0.base, comp_mode);
- assumption = simplify_gen_relational (EQ, SImode, mode, tmp, mmax);
+ assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
+ mode_mmax);
if (assumption == const_true_rtx)
goto zero_iter;
iv0.base = simplify_gen_binary (PLUS, comp_mode,
else
{
tmp = lowpart_subreg (mode, iv1.base, comp_mode);
- assumption = simplify_gen_relational (EQ, SImode, mode, tmp, mmin);
+ assumption = simplify_gen_relational (EQ, SImode, mode, tmp,
+ mode_mmin);
if (assumption == const_true_rtx)
goto zero_iter;
iv1.base = simplify_gen_binary (PLUS, comp_mode,
if (iv0.step == const0_rtx)
{
tmp = lowpart_subreg (mode, iv0.base, comp_mode);
- if (rtx_equal_p (tmp, mmin))
+ if (rtx_equal_p (tmp, mode_mmin))
{
desc->infinite =
alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
else
{
tmp = lowpart_subreg (mode, iv1.base, comp_mode);
- if (rtx_equal_p (tmp, mmax))
+ if (rtx_equal_p (tmp, mode_mmax))
{
desc->infinite =
alloc_EXPR_LIST (0, const_true_rtx, NULL_RTX);
delta = lowpart_subreg (mode, delta, comp_mode);
delta = simplify_gen_binary (UMOD, mode, delta, step);
may_xform = const0_rtx;
+ may_not_xform = const_true_rtx;
if (GET_CODE (delta) == CONST_INT)
{
tmp = lowpart_subreg (mode, iv0.base, comp_mode);
may_xform = simplify_gen_relational (cond, SImode, mode,
bound, tmp);
+ may_not_xform = simplify_gen_relational (reverse_condition (cond),
+ SImode, mode,
+ bound, tmp);
}
else
{
tmp = lowpart_subreg (mode, iv1.base, comp_mode);
may_xform = simplify_gen_relational (cond, SImode, mode,
tmp, bound);
+ may_not_xform = simplify_gen_relational (reverse_condition (cond),
+ SImode, mode,
+ tmp, bound);
}
}
completely senseless. This is OK, as we would need this assumption
to determine the number of iterations anyway. */
if (may_xform != const_true_rtx)
- desc->assumptions = alloc_EXPR_LIST (0, may_xform,
- desc->assumptions);
+ {
+ /* If the step is a power of two and the final value we have
+ computed overflows, the loop is infinite. Otherwise it
+ is nontrivial to compute the number of iterations. */
+ s = INTVAL (step);
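+ /* S is a power of two iff it has a single bit set. */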
+ if ((s & (s - 1)) == 0)
+ desc->infinite = alloc_EXPR_LIST (0, may_not_xform,
+ desc->infinite);
+ else
+ desc->assumptions = alloc_EXPR_LIST (0, may_xform,
+ desc->assumptions);
+ }
/* We are going to lose some information about the upper bound on the
number of iterations in this step, so record the information
if (GET_CODE (iv1.base) == CONST_INT)
up = INTVAL (iv1.base);
else
- up = INTVAL (mmax) - inc;
- down = INTVAL (GET_CODE (iv0.base) == CONST_INT ? iv0.base : mmin);
+ up = INTVAL (mode_mmax) - inc;
+ down = INTVAL (GET_CODE (iv0.base) == CONST_INT
+ ? iv0.base
+ : mode_mmin);
desc->niter_max = (up - down) / inc + 1;
if (iv0.step == const0_rtx)
desc->infinite = alloc_EXPR_LIST (0, assumption, desc->infinite);
tmp = simplify_gen_binary (UDIV, mode, tmp1, GEN_INT (d));
- tmp = simplify_gen_binary (MULT, mode,
- tmp, GEN_INT (inverse (s, size)));
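+ /* The inverse of S modulo 2^SIZE is computed in the full
+ HOST_WIDEST_INT width; truncate it so that GEN_INT gets the
+ canonical sign-extended form for MODE (PR c++/15463). */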
+ inv = inverse (s, size);
+ inv = trunc_int_for_mode (inv, mode);
+ tmp = simplify_gen_binary (MULT, mode, tmp, GEN_INT (inv));
desc->niter_expr = simplify_gen_binary (AND, mode, tmp, bound);
}
else
tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
- bound = simplify_gen_binary (MINUS, mode, mmax, step);
+ bound = simplify_gen_binary (MINUS, mode, mode_mmax,
+ lowpart_subreg (mode, step, comp_mode));
assumption = simplify_gen_relational (cond, SImode, mode,
tmp1, bound);
desc->assumptions =
tmp0 = lowpart_subreg (mode, iv0.base, comp_mode);
tmp1 = lowpart_subreg (mode, iv1.base, comp_mode);
- bound = simplify_gen_binary (MINUS, mode, mmin, step);
+ bound = simplify_gen_binary (MINUS, mode, mode_mmin,
+ lowpart_subreg (mode, step, comp_mode));
assumption = simplify_gen_relational (cond, SImode, mode,
bound, tmp0);
desc->assumptions =
static rtx
may_unswitch_on (basic_block bb, struct loop *loop, rtx *cinsn)
{
- rtx test, at, insn, op[2];
+ rtx test, at, insn, op[2], stest;
struct rtx_iv iv;
unsigned i;
enum machine_mode mode;
return test;
}
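+ /* Try to fold the condition now that the operands are known; if it
+ folds to a constant, return it so that unswitch_single_loop can
+ handle the trivial case directly (PR rtl-optimization/14692). */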
+ stest = simplify_gen_relational (GET_CODE (test), SImode,
+ mode, op[0], op[1]);
+ if (stest == const0_rtx
+ || stest == const_true_rtx)
+ return stest;
+
return canon_condition (gen_rtx_fmt_ee (GET_CODE (test), SImode,
op[0], op[1]));
}
basic_block *bbs;
struct loop *nloop;
unsigned i;
- rtx cond, rcond, conds, rconds, acond, cinsn = NULL_RTX;
+ rtx cond, rcond = NULL_RTX, conds, rconds, acond, cinsn = NULL_RTX;
int repeat;
edge e;
return;
}
- rcond = reversed_condition (cond);
- if (rcond)
- rcond = canon_condition (rcond);
+ if (cond != const0_rtx
+ && cond != const_true_rtx)
+ {
+ rcond = reversed_condition (cond);
+ if (rcond)
+ rcond = canon_condition (rcond);
- /* Check whether the result can be predicted. */
- for (acond = cond_checked; acond; acond = XEXP (acond, 1))
- simplify_using_condition (XEXP (acond, 0), &cond, NULL);
+ /* Check whether the result can be predicted. */
+ for (acond = cond_checked; acond; acond = XEXP (acond, 1))
+ simplify_using_condition (XEXP (acond, 0), &cond, NULL);
+ }
if (cond == const_true_rtx)
{
extern void variable_tracking_main (void);
/* In stor-layout.c. */
-extern void get_mode_bounds (enum machine_mode, int, rtx *, rtx *);
+extern void get_mode_bounds (enum machine_mode, int, enum machine_mode,
+ rtx *, rtx *);
/* In loop-unswitch.c */
extern rtx reversed_condition (rtx);
/* Otherwise, there are some code-specific tests we can make. */
else
{
+ /* Optimize comparisons with upper and lower bounds. */
+ if (INTEGRAL_MODE_P (mode)
+ && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
+ {
+ rtx mmin, mmax;
+ int sign;
+
+ if (code == GEU
+ || code == LEU
+ || code == GTU
+ || code == LTU)
+ sign = 0;
+ else
+ sign = 1;
+
+ get_mode_bounds (mode, sign, mode, &mmin, &mmax);
+
+ tem = NULL_RTX;
+ switch (code)
+ {
+ case GEU:
+ case GE:
+ /* x >= min is always true. */
+ if (rtx_equal_p (trueop1, mmin))
+ tem = const_true_rtx;
+ else
+ break;
+
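+ /* Fall through: TEM is const_true_rtx here, and the LEU/LE test
+ below can only set it to the same value. */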
+ case LEU:
+ case LE:
+ /* x <= max is always true. */
+ if (rtx_equal_p (trueop1, mmax))
+ tem = const_true_rtx;
+ break;
+
+ case GTU:
+ case GT:
+ /* x > max is always false. */
+ if (rtx_equal_p (trueop1, mmax))
+ tem = const0_rtx;
+ break;
+
+ case LTU:
+ case LT:
+ /* x < min is always false. */
+ if (rtx_equal_p (trueop1, mmin))
+ tem = const0_rtx;
+ break;
+
+ default:
+ break;
+ }
+ if (tem == const0_rtx
+ || tem == const_true_rtx)
+ return tem;
+ }
+
switch (code)
{
case EQ:
return const_true_rtx;
break;
- case GEU:
- /* Unsigned values are never negative. */
- if (trueop1 == const0_rtx)
- return const_true_rtx;
- break;
-
- case LTU:
- if (trueop1 == const0_rtx)
- return const0_rtx;
- break;
-
- case LEU:
- /* Unsigned values are never greater than the largest
- unsigned value. */
- if (GET_CODE (trueop1) == CONST_INT
- && (unsigned HOST_WIDE_INT) INTVAL (trueop1) == GET_MODE_MASK (mode)
- && INTEGRAL_MODE_P (mode))
- return const_true_rtx;
- break;
-
- case GTU:
- if (GET_CODE (trueop1) == CONST_INT
- && (unsigned HOST_WIDE_INT) INTVAL (trueop1) == GET_MODE_MASK (mode)
- && INTEGRAL_MODE_P (mode))
- return const0_rtx;
- break;
-
case LT:
/* Optimize abs(x) < 0.0. */
if (trueop1 == CONST0_RTX (mode) && !HONOR_SNANS (mode))
}
/* Gets minimal and maximal values for MODE (signed or unsigned depending on
- SIGN). */
+ SIGN). The returned constants are made to be usable in TARGET_MODE. */
void
-get_mode_bounds (enum machine_mode mode, int sign, rtx *mmin, rtx *mmax)
+get_mode_bounds (enum machine_mode mode, int sign,
+ enum machine_mode target_mode,
+ rtx *mmin, rtx *mmax)
{
- int size = GET_MODE_BITSIZE (mode);
+ unsigned size = GET_MODE_BITSIZE (mode);
+ unsigned HOST_WIDE_INT min_val, max_val;
if (size > HOST_BITS_PER_WIDE_INT)
abort ();
if (sign)
{
- *mmin = GEN_INT (-((unsigned HOST_WIDE_INT) 1 << (size - 1)));
- *mmax = GEN_INT (((unsigned HOST_WIDE_INT) 1 << (size - 1)) - 1);
+ min_val = -((unsigned HOST_WIDE_INT) 1 << (size - 1));
+ max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1)) - 1;
}
else
{
- *mmin = const0_rtx;
- *mmax = GEN_INT (((unsigned HOST_WIDE_INT) 1 << (size - 1) << 1) - 1);
+ min_val = 0;
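+ /* Shift in two steps so that SIZE == HOST_BITS_PER_WIDE_INT does not
+ produce an undefined full-width shift. */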
+ max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1) << 1) - 1;
}
+
+ *mmin = GEN_INT (trunc_int_for_mode (min_val, target_mode));
+ *mmax = GEN_INT (trunc_int_for_mode (max_val, target_mode));
}
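Routing the bounds through trunc_int_for_mode matters because CONST_INTs
are stored sign-extended from the width of their mode.  A standalone
sketch in plain C (illustrative only, not part of the patch; sign_extend
is a stand-in for what trunc_int_for_mode does to the value):

    #include <stdio.h>

    static long long
    sign_extend (unsigned long long val, int bits)
    {
      unsigned long long sign = 1ULL << (bits - 1);

      val &= (sign << 1) - 1;          /* Keep the low BITS bits.  */
      return (long long) ((val ^ sign) - sign);
    }

    int
    main (void)
    {
      /* The unsigned maximum of a 32-bit mode...  */
      unsigned long long umax = (1ULL << 31 << 1) - 1;

      printf ("raw value: 0x%llx\n", umax);
      /* ...must be represented as -1 in a wider host integer.  */
      printf ("canonical form: %lld\n", sign_extend (umax, 32));
      return 0;
    }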
#include "gt-stor-layout.h"
extern void flush_addressof (tree);
extern void setjmp_vars_warning (tree);
extern void setjmp_args_warning (void);
-extern void mark_all_temps_used (void);
extern void init_temp_slots (void);
extern void combine_temp_slots (void);
extern void free_temp_slots (void);