+2005-08-16 James A. Morrison <phython@gcc.gnu.org>
+
+ * fold-const.c (optimize_bit_field_compare): Remove extra fold call.
+ (try_move_mult_to_index): Call fold_build2 instead of build2.
+ (fold_binary): Don't call fold after calls to try_move_mult_to_index.
+ * tree-ssa-loop-niter.c (inverse): Call int_const_binop instead of
+ fold_binary_to_constant.
+ (infer_loop_bounds_from_undefined): Call fold_build2 instead of
+ fold (build.
+ * tree-data-ref.c (tree_fold_divides_p): Use tree_int_cst_equal to
+ check if A == gcd (A, B). Remove TYPE argument.
+ (analyze_offset): Use fold_build2 instead of fold (build.
+ (create_data_ref): Likewise.
+ (analyze_siv_subscript_cst_affine): Update calls to tree_fold_divides_p.
+ * tree-ssa-ccp.c (widen_bitfield): Call fold_build2 instead of build2
+ then fold.
+
2005-08-17 Kelley Cook <kcook@gcc.gnu.org>
* config/arm/unaligned-funcs.c,config/i386/crtfastmath.c,
TREE_THIS_VOLATILE (lhs) = 1;
}
- rhs = fold (const_binop (BIT_AND_EXPR,
- const_binop (LSHIFT_EXPR,
- fold_convert (unsigned_type, rhs),
- size_int (lbitpos), 0),
- mask, 0));
+ rhs = const_binop (BIT_AND_EXPR,
+ const_binop (LSHIFT_EXPR,
+ fold_convert (unsigned_type, rhs),
+ size_int (lbitpos), 0),
+ mask, 0);
return build2 (code, compare_type,
build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
TREE_OPERAND (pos, 1)),
fold_convert (itype, delta));
- return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
+ return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
{
tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
if (tem)
- return fold_convert (type, fold (tem));
+ return fold_convert (type, tem);
}
else if (TREE_CODE (arg1) == ADDR_EXPR)
{
tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
if (tem)
- return fold_convert (type, fold (tem));
+ return fold_convert (type, tem);
}
}
else
{
tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
if (tem)
- return fold_convert (type, fold (tem));
+ return fold_convert (type, tem);
}
if (flag_unsafe_math_optimizations
/* Returns true iff A divides B. */
static inline bool
-tree_fold_divides_p (tree type,
- tree a,
+tree_fold_divides_p (tree a,
tree b)
{
/* Determines whether (A == gcd (A, B)). */
- return integer_zerop
- (fold_build2 (MINUS_EXPR, type, a, tree_fold_gcd (a, b)));
+ return tree_int_cst_equal (a, tree_fold_gcd (a, b));
}
/* Compute the greatest common denominator of two numbers using
*constant = constant_0 ? constant_0 : constant_1;
if (invariant_0 && invariant_1)
*invariant =
- fold (build (code, TREE_TYPE (invariant_0), invariant_0, invariant_1));
+ fold_build2 (code, TREE_TYPE (invariant_0), invariant_0, invariant_1);
else
*invariant = invariant_0 ? invariant_0 : invariant_1;
}
if (constant)
{
DR_INIT (dr) = fold_convert (ssizetype, constant);
- init_cond = fold (build (TRUNC_DIV_EXPR, TREE_TYPE (constant),
- constant, type_size));
+ init_cond = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (constant),
+ constant, type_size);
}
else
DR_INIT (dr) = init_cond = ssize_int (0);;
chrec_b = {10, +, 1}
*/
- if (tree_fold_divides_p
- (integer_type_node, CHREC_RIGHT (chrec_b), difference))
+ if (tree_fold_divides_p (CHREC_RIGHT (chrec_b), difference))
{
*overlaps_a = integer_zero_node;
*overlaps_b = fold_build2 (EXACT_DIV_EXPR, integer_type_node,
return;
}
- /* When the step does not divides the difference, there are
+ /* When the step does not divide the difference, there are
no overlaps. */
else
{
chrec_a = 3
chrec_b = {10, +, -1}
*/
- if (tree_fold_divides_p
- (integer_type_node, CHREC_RIGHT (chrec_b), difference))
+ if (tree_fold_divides_p (CHREC_RIGHT (chrec_b), difference))
{
*overlaps_a = integer_zero_node;
- *overlaps_b = fold
- (build (EXACT_DIV_EXPR, integer_type_node, difference,
- CHREC_RIGHT (chrec_b)));
+ *overlaps_b = fold_build2 (EXACT_DIV_EXPR,
+ integer_type_node, difference,
+ CHREC_RIGHT (chrec_b));
*last_conflicts = integer_one_node;
return;
}
switch (TREE_CODE (chrec))
{
case POLYNOMIAL_CHREC:
- return (tree_fold_divides_p (integer_type_node, CHREC_RIGHT (chrec), cst)
+ return (tree_fold_divides_p (CHREC_RIGHT (chrec), cst)
&& chrec_steps_divide_constant_p (CHREC_LEFT (chrec), cst));
default:
for (i = 0, mask = 0; i < field_size; i++)
mask |= ((HOST_WIDE_INT) 1) << i;
- wide_val = build2 (BIT_AND_EXPR, TREE_TYPE (var), val,
- build_int_cst (TREE_TYPE (var), mask));
+ wide_val = fold_build2 (BIT_AND_EXPR, TREE_TYPE (var), val,
+ build_int_cst (TREE_TYPE (var), mask));
}
else
{
for (i = 0, mask = 0; i < (var_size - field_size); i++)
mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1);
- wide_val = build2 (BIT_IOR_EXPR, TREE_TYPE (var), val,
- build_int_cst (TREE_TYPE (var), mask));
+ wide_val = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (var), val,
+ build_int_cst (TREE_TYPE (var), mask));
}
- return fold (wide_val);
+ return wide_val;
}
rslt = build_int_cst_type (type, 1);
for (; ctr; ctr--)
{
- rslt = fold_binary_to_constant (MULT_EXPR, type, rslt, x);
- x = fold_binary_to_constant (MULT_EXPR, type, x, x);
+ rslt = int_const_binop (MULT_EXPR, rslt, x, 0);
+ x = int_const_binop (MULT_EXPR, x, x, 0);
}
- rslt = fold_binary_to_constant (BIT_AND_EXPR, type, rslt, mask);
+ rslt = int_const_binop (BIT_AND_EXPR, rslt, mask, 0);
}
return rslt;
utype = unsigned_type_for (type);
if (tree_int_cst_lt (step, integer_zero_node))
- diff = fold (build2 (MINUS_EXPR, utype, init,
- TYPE_MIN_VALUE (type)));
+ diff = fold_build2 (MINUS_EXPR, utype, init,
+ TYPE_MIN_VALUE (type));
else
- diff = fold (build2 (MINUS_EXPR, utype,
- TYPE_MAX_VALUE (type), init));
+ diff = fold_build2 (MINUS_EXPR, utype,
+ TYPE_MAX_VALUE (type), init);
- estimation = fold (build2 (CEIL_DIV_EXPR, utype, diff,
- step));
+ estimation = fold_build2 (CEIL_DIV_EXPR, utype, diff,
+ step);
record_estimate (loop, estimation, boolean_true_node, stmt);
}