+2011-05-04 Richard Guenther <rguenther@suse.de>
+
+ * tree.h (int_const_binop): Remove notrunc argument.
+ * fold-const.c (int_const_binop): Remove notrunc argument. Always
+ create integer constants that are properly truncated.
+ (extract_muldiv_1): Expand one notrunc int_const_binop caller.
+ (const_binop): Remove zero notrunc argument to int_const_binop.
+ (size_binop_loc): Likewise.
+ (fold_div_compare): Likewise.
+ (maybe_canonicalize_comparison_1): Likewise.
+ (fold_comparison): Likewise.
+ (fold_binary_loc): Likewise.
+ (multiple_of_p): Likewise.
+ * expr.c (store_constructor): Likewise.
+ * gimple-fold.c (maybe_fold_offset_to_array_ref): Likewise.
+ (maybe_fold_stmt_addition): Likewise.
+ * ipa-prop.c (ipa_modify_call_arguments): Likewise.
+ * stor-layout.c (layout_type): Likewise.
+ * tree-data-ref.c (tree_fold_divides_p): Likewise.
+ * tree-sra.c (build_ref_for_offset): Likewise.
+ (build_user_friendly_ref_for_offset): Likewise.
+ * tree-ssa-address.c (maybe_fold_tmr): Likewise.
+ * tree-ssa-forwprop.c (forward_propagate_addr_expr_1): Likewise.
+ * tree-ssa-loop-niter.c (inverse): Likewise.
+ * tree-ssa-pre.c (create_component_ref_by_pieces_1): Likewise.
+ * tree-ssa.c (maybe_rewrite_mem_ref_base): Likewise.
+ * tree-switch-conversion.c (check_range): Likewise.
+ (build_constructors): Likewise.
+ * tree-vect-generic.c (expand_vector_piecewise): Likewise.
+ * tree-vrp.c (set_and_canonicalize_value_range): Likewise.
+ (extract_range_from_assert): Likewise.
+ (vrp_int_const_binop): Likewise.
+ (extract_range_from_binary_expr): Likewise.
+ (extract_range_from_unary_expr): Likewise.
+ (check_array_ref): Likewise.
+ (find_case_label_range): Likewise.
+ (simplify_div_or_mod_using_ranges): Likewise.
+ * tree-cfg.c (group_case_labels_stmt): Use double-ints for
+ comparing case labels for merging.
+
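The user-visible change above is the signature: every caller that passed a
literal zero notrunc argument simply drops it, and the callers that passed a
nonzero argument (extract_muldiv_1, group_case_labels_stmt) are rewritten to
work on double-ints directly. A minimal before/after sketch of a typical call
site, for illustration only:

    /* Before: the fourth argument selected whether the result was
       truncated to the precision of TREE_TYPE (arg1).  */
    t = int_const_binop (PLUS_EXPR, arg1, arg2, 0);

    /* After: the result is always properly truncated.  */
    t = int_const_binop (PLUS_EXPR, arg1, arg2);
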
2011-05-03 Mark Wielaard <mjw@redhat.com>

 * dwarf2out.c (debug_str_hash_forced): Removed.
+2011-05-04 Richard Guenther <rguenther@suse.de>
+
+ * gcc-interface/trans.c (gnat_to_gnu): Remove zero notrunc argument to
+ int_const_binop.
+ (pos_to_constructor): Likewise.
+
2011-05-03 Nathan Froyd <froydnj@codesourcery.com>
 Eric Botcazou <ebotcazou@adacore.com>

Get_String_Char (gnat_string, i + 1));
CONSTRUCTOR_APPEND_ELT (gnu_vec, gnu_idx, t);
- gnu_idx = int_const_binop (PLUS_EXPR, gnu_idx, integer_one_node,
- 0);
+ gnu_idx = int_const_binop (PLUS_EXPR, gnu_idx, integer_one_node);
}
gnu_result = gnat_build_constructor (gnu_result_type, gnu_vec);
CONSTRUCTOR_APPEND_ELT (gnu_expr_vec, gnu_index,
convert (TREE_TYPE (gnu_array_type), gnu_expr));
- gnu_index = int_const_binop (PLUS_EXPR, gnu_index, integer_one_node, 0);
+ gnu_index = int_const_binop (PLUS_EXPR, gnu_index, integer_one_node);
}
return gnat_build_constructor (gnu_array_type, gnu_expr_vec);
int n_elts_here = tree_low_cst
(int_const_binop (TRUNC_DIV_EXPR,
TYPE_SIZE (TREE_TYPE (value)),
- TYPE_SIZE (elttype), 0), 1);
+ TYPE_SIZE (elttype)), 1);
count += n_elts_here;
if (mostly_zeros_p (value))
/* Combine two integer constants ARG1 and ARG2 under operation CODE
to produce a new constant. Return NULL_TREE if we don't know how
- to evaluate CODE at compile-time.
-
- If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
+ to evaluate CODE at compile-time. */
tree
-int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
+int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
double_int op1, op2, res, tmp;
tree t;
return NULL_TREE;
}
- if (notrunc)
- {
- t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high);
-
- /* Propagate overflow flags ourselves. */
- if (((!uns || is_sizetype) && overflow)
- | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
- {
- t = copy_node (t);
- TREE_OVERFLOW (t) = 1;
- }
- }
- else
- t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
- ((!uns || is_sizetype) && overflow)
- | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
+ t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
+ ((!uns || is_sizetype) && overflow)
+ | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
return t;
}
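
With the notrunc branch gone, int_const_binop always funnels the double_int
result through force_fit_type_double, which truncates it to the precision of
the result type and flags TREE_OVERFLOW when the exact value did not fit. A
standalone model of that semantics for an 8-bit unsigned type (illustrative
only, not GCC internals):

    #include <assert.h>
    #include <stdint.h>

    /* Wrap the exact sum to 8 bits, reporting whether truncation
       lost information -- the behavior int_const_binop now always has.  */
    static uint8_t
    add_uint8 (uint8_t a, uint8_t b, int *overflow)
    {
      unsigned wide = (unsigned) a + b;  /* exact result */
      *overflow = wide > UINT8_MAX;      /* did not fit the type */
      return (uint8_t) (wide & 0xff);    /* properly truncated constant */
    }

    int
    main (void)
    {
      int ovf;
      assert (add_uint8 (200, 100, &ovf) == 44 && ovf);
      return 0;
    }
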
STRIP_NOPS (arg2);
if (TREE_CODE (arg1) == INTEGER_CST)
- return int_const_binop (code, arg1, arg2, 0);
+ return int_const_binop (code, arg1, arg2);
if (TREE_CODE (arg1) == REAL_CST)
{
}
/* Handle general case of two integer constants. */
- return int_const_binop (code, arg0, arg1, 0);
+ return int_const_binop (code, arg0, arg1);
}
return fold_build2_loc (loc, code, type, arg0, arg1);
/* If these are the same operation types, we can associate them
assuming no overflow. */
- if (tcode == code
- && 0 != (t1 = int_const_binop (MULT_EXPR,
- fold_convert (ctype, op1),
- fold_convert (ctype, c), 1))
- && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1),
- (TYPE_UNSIGNED (ctype)
- && tcode != MULT_EXPR) ? -1 : 1,
- TREE_OVERFLOW (t1)))
- && !TREE_OVERFLOW (t1))
- return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
+ if (tcode == code)
+ {
+ double_int mul;
+ int overflow_p;
+ mul = double_int_mul_with_sign
+ (double_int_ext
+ (tree_to_double_int (op1),
+ TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
+ double_int_ext
+ (tree_to_double_int (c),
+ TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
+ false, &overflow_p);
+ overflow_p = (((!TYPE_UNSIGNED (ctype)
+ || (TREE_CODE (ctype) == INTEGER_TYPE
+ && TYPE_IS_SIZETYPE (ctype)))
+ && overflow_p)
+ | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
+ if (!double_int_fits_to_tree_p (ctype, mul)
+ && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
+ || !TYPE_UNSIGNED (ctype)
+ || (TREE_CODE (ctype) == INTEGER_TYPE
+ && TYPE_IS_SIZETYPE (ctype))))
+ overflow_p = 1;
+ if (!overflow_p)
+ return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
+ double_int_to_tree (ctype, mul));
+ }
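
The expanded caller above performs the multiplication of op1 and c in
double_int form, tracks the overflow bit itself, and folds only when no
overflow occurred; the old code got the same effect by asking int_const_binop
not to truncate and then forcing the fit by hand. A rough standalone analogue
of the fold-only-if-exact guard, using the GCC/Clang __builtin_mul_overflow
builtin (names are illustrative):

    #include <stdint.h>
    #include <stdio.h>

    /* Fold c1 * c2 only when the product is exact in the type,
       mirroring the !overflow_p guard in extract_muldiv_1.  */
    static int
    fold_mult (int64_t c1, int64_t c2, int64_t *prod)
    {
      return !__builtin_mul_overflow (c1, c2, prod);
    }

    int
    main (void)
    {
      int64_t p;
      if (fold_mult (INT64_MAX, 2, &p))
        printf ("folded to %lld\n", (long long) p);
      else
        printf ("overflow, not folded\n");
      return 0;
    }
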
/* If these operations "cancel" each other, we have the main
optimizations of this pass, which occur when either constant is a
int overflow;
/* We have to do this the hard way to detect unsigned overflow.
- prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
+ prod = int_const_binop (MULT_EXPR, arg01, arg1); */
overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
TREE_INT_CST_HIGH (arg01),
TREE_INT_CST_LOW (arg1),
if (unsigned_p)
{
tmp = int_const_binop (MINUS_EXPR, arg01,
- build_int_cst (TREE_TYPE (arg01), 1), 0);
+ build_int_cst (TREE_TYPE (arg01), 1));
lo = prod;
- /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
+ /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
TREE_INT_CST_HIGH (prod),
TREE_INT_CST_LOW (tmp),
else if (tree_int_cst_sgn (arg01) >= 0)
{
tmp = int_const_binop (MINUS_EXPR, arg01,
- build_int_cst (TREE_TYPE (arg01), 1), 0);
+ build_int_cst (TREE_TYPE (arg01), 1));
switch (tree_int_cst_sgn (arg1))
{
case -1:
neg_overflow = true;
- lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
+ lo = int_const_binop (MINUS_EXPR, prod, tmp);
hi = prod;
break;
break;
case 1:
- hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
+ hi = int_const_binop (PLUS_EXPR, prod, tmp);
lo = prod;
break;
code = swap_tree_comparison (code);
tmp = int_const_binop (PLUS_EXPR, arg01,
- build_int_cst (TREE_TYPE (arg01), 1), 0);
+ build_int_cst (TREE_TYPE (arg01), 1));
switch (tree_int_cst_sgn (arg1))
{
case -1:
- hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
+ hi = int_const_binop (MINUS_EXPR, prod, tmp);
lo = prod;
break;
case 1:
neg_overflow = true;
- lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
+ lo = int_const_binop (PLUS_EXPR, prod, tmp);
hi = prod;
break;
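
For context, fold_div_compare turns X / C1 op C2 into a range test on X: prod
is C1 * C2 and, for unsigned division, the matching values are
[prod, prod + (C1 - 1)]. For example, X / 4 == 3 holds exactly for
12 <= X <= 15. A quick standalone check of that identity:

    #include <assert.h>

    int
    main (void)
    {
      unsigned x;
      /* lo = prod = 12, hi = prod + (C1 - 1) = 15.  */
      for (x = 0; x < 64; x++)
        assert ((x / 4 == 3) == (x >= 12 && x <= 15));
      return 0;
    }
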
return NULL_TREE;
t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
- cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
+ cst0, build_int_cst (TREE_TYPE (cst0), 1));
if (code0 != INTEGER_CST)
t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
of lower absolute value than before. */
cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
? MINUS_EXPR : PLUS_EXPR,
- const2, const1, 0);
+ const2, const1);
if (!TREE_OVERFLOW (cst)
&& tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
{
cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
? MINUS_EXPR : PLUS_EXPR,
- const1, const2, 0);
+ const1, const2);
if (!TREE_OVERFLOW (cst)
&& tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
{
return fold_build2 (MEM_REF, type,
TREE_OPERAND (iref, 0),
int_const_binop (PLUS_EXPR, arg1,
- TREE_OPERAND (iref, 1), 0));
+ TREE_OPERAND (iref, 1)));
}
/* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
return fold_build2 (MEM_REF, type,
build_fold_addr_expr (base),
int_const_binop (PLUS_EXPR, arg1,
- size_int (coffset), 0));
+ size_int (coffset)));
}
return NULL_TREE;
arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
lshift = build_int_cst (type, -1);
- lshift = int_const_binop (code, lshift, arg1, 0);
+ lshift = int_const_binop (code, lshift, arg1);
return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
}
|| tree_int_cst_sgn (bottom) < 0)))
return 0;
return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
- top, bottom, 0));
+ top, bottom));
default:
return 0;
+2011-05-04 Richard Guenther <rguenther@suse.de>
+
+ * trans-types.c (gfc_get_array_type_bounds): Remove zero notrunc
+ argument to int_const_binop.
+
2011-05-03 Tobias Burnus <burnus@net-b.de>

 PR fortran/18918
if (stride)
rtype = build_range_type (gfc_array_index_type, gfc_index_zero_node,
int_const_binop (MINUS_EXPR, stride,
- integer_one_node, 0));
+ integer_one_node));
else
rtype = gfc_array_range_type;
arraytype = build_array_type (etype, rtype);
|| TREE_CODE (elt_offset) != INTEGER_CST)
return NULL_TREE;
- elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
+ elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound);
base = TREE_OPERAND (base, 0);
}
}
if (!integer_zerop (min_idx))
- idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
+ idx = int_const_binop (PLUS_EXPR, idx, min_idx);
if (!integer_zerop (elt_offset))
- idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
+ idx = int_const_binop (PLUS_EXPR, idx, elt_offset);
/* Make sure to possibly truncate late after offsetting. */
idx = fold_convert (idx_type, idx);
array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
if (!integer_zerop (min_idx))
array_idx = int_const_binop (MINUS_EXPR, array_idx,
- min_idx, 0);
+ min_idx);
}
}
/* Convert the index to a byte offset. */
array_idx = fold_convert (sizetype, array_idx);
- array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
+ array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size);
/* Update the operands for the next round, or for folding. */
op1 = int_const_binop (PLUS_EXPR,
- array_idx, op1, 0);
+ array_idx, op1);
op0 = array_obj;
}
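
The folding above rebases the array index by the domain's lower bound, scales
it to a byte offset, and adds it into the existing constant offset. With a
lower bound of 1 and 4-byte elements, index 5 contributes (5 - 1) * 4 = 16
bytes. A standalone sketch of that arithmetic (names are invented for
illustration):

    #include <assert.h>
    #include <stddef.h>

    /* Byte offset of element IDX in an array whose domain starts at
       MIN_IDX, added to an existing constant offset BASE_OFF.  */
    static size_t
    byte_offset (long idx, long min_idx, size_t elt_size, size_t base_off)
    {
      return (size_t) (idx - min_idx) * elt_size + base_off;
    }

    int
    main (void)
    {
      assert (byte_offset (5, 1, 4, 0) == 16);
      return 0;
    }
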
base_offset
+ adj->offset / BITS_PER_UNIT);
off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
- off, 0);
+ off);
base = TREE_OPERAND (base, 0);
}
else
TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
TYPE_SIZE_UNIT (type) = int_const_binop (MULT_EXPR,
TYPE_SIZE_UNIT (innertype),
- size_int (nunits), 0);
+ size_int (nunits));
TYPE_SIZE (type) = int_const_binop (MULT_EXPR, TYPE_SIZE (innertype),
- bitsize_int (nunits), 0);
+ bitsize_int (nunits));
/* Always naturally align vectors. This prevents ABI changes
depending on whether or not native vector modes are supported. */
{
tree merge_case = gimple_switch_label (stmt, i);
tree merge_label = CASE_LABEL (merge_case);
- tree t = int_const_binop (PLUS_EXPR, base_high,
- integer_one_node, 1);
+ double_int bhp1 = double_int_add (tree_to_double_int (base_high),
+ double_int_one);
/* Merge the cases if they jump to the same place,
and their ranges are consecutive. */
if (merge_label == base_label
- && tree_int_cst_equal (CASE_LOW (merge_case), t))
+ && double_int_equal_p (tree_to_double_int (CASE_LOW (merge_case)),
+ bhp1))
{
base_high = CASE_HIGH (merge_case) ?
CASE_HIGH (merge_case) : CASE_LOW (merge_case);
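
Doing the base_high + 1 computation in double_int rather than via
int_const_binop with notrunc set avoids building a tree constant at all; the
sum is effectively one bit wider than the type, so for a case label at the
type's maximum it must not wrap around to the minimum, or the adjacency test
would match spuriously. A standalone model of the idea using a wider type
(illustrative only):

    #include <assert.h>
    #include <stdint.h>

    /* Case ranges are adjacent iff low == high + 1, with the sum
       computed in a wider type so it cannot wrap.  */
    static int
    adjacent (int8_t high, int8_t low)
    {
      return (int16_t) low == (int16_t) high + 1;
    }

    int
    main (void)
    {
      assert (adjacent (5, 6));
      assert (!adjacent (INT8_MAX, INT8_MIN)); /* 127 + 1 != -128 here */
      return 0;
    }
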
{
gcc_assert (TREE_CODE (a) == INTEGER_CST);
gcc_assert (TREE_CODE (b) == INTEGER_CST);
- return integer_zerop (int_const_binop (TRUNC_MOD_EXPR, b, a, 0));
+ return integer_zerop (int_const_binop (TRUNC_MOD_EXPR, b, a));
}
/* Returns true iff A divides B. */
{
off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
base_offset + offset / BITS_PER_UNIT);
- off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off, 0);
+ off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
base = unshare_expr (TREE_OPERAND (base, 0));
}
else
return false;
index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
if (!integer_zerop (minidx))
- index = int_const_binop (PLUS_EXPR, index, minidx, 0);
+ index = int_const_binop (PLUS_EXPR, index, minidx);
*res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
NULL_TREE, NULL_TREE);
offset = offset % el_size;
(get_addr_base_and_unit_offset
(TREE_OPERAND (addr.symbol, 0), &offset));
addr.offset = int_const_binop (PLUS_EXPR,
- addr.offset, size_int (offset), 0);
+ addr.offset, size_int (offset));
changed = true;
}
new_base = TREE_OPERAND (*def_rhs_basep, 0);
new_offset
= int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1),
- TREE_OPERAND (*def_rhs_basep, 1), 0);
+ TREE_OPERAND (*def_rhs_basep, 1));
}
else
{
new_base = TREE_OPERAND (*def_rhs_basep, 0);
new_offset
= int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1),
- TREE_OPERAND (*def_rhs_basep, 1), 0);
+ TREE_OPERAND (*def_rhs_basep, 1));
}
else
{
rslt = build_int_cst (type, 1);
for (; ctr; ctr--)
{
- rslt = int_const_binop (MULT_EXPR, rslt, x, 0);
- x = int_const_binop (MULT_EXPR, x, x, 0);
+ rslt = int_const_binop (MULT_EXPR, rslt, x);
+ x = int_const_binop (MULT_EXPR, x, x);
}
- rslt = int_const_binop (BIT_AND_EXPR, rslt, mask, 0);
+ rslt = int_const_binop (BIT_AND_EXPR, rslt, mask);
}
return rslt;
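
For reference, this loop in inverse computes x^(2^ctr - 1): each iteration
multiplies the accumulated result by x and then squares x. With ctr one less
than the precision and odd x, that power is the multiplicative inverse of x
modulo 2^precision. A standalone model, assuming mask is 2^bits - 1
(illustrative, not GCC code):

    #include <assert.h>
    #include <stdint.h>

    /* Inverse of odd X modulo 2^BITS, computed as x^(2^(bits-1) - 1).  */
    static uint64_t
    inverse_mod_pow2 (uint64_t x, unsigned bits)
    {
      uint64_t mask = bits == 64 ? ~0ULL : (1ULL << bits) - 1;
      uint64_t rslt = 1;
      unsigned ctr;

      for (ctr = bits - 1; ctr; ctr--)
        {
          rslt = (rslt * x) & mask;  /* rslt = rslt * x */
          x = (x * x) & mask;        /* x = x * x */
        }
      return rslt & mask;
    }

    int
    main (void)
    {
      uint64_t inv = inverse_mod_pow2 (7, 32);
      assert (((7 * inv) & 0xffffffffULL) == 1);
      return 0;
    }
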
gcc_assert (base);
offset = int_const_binop (PLUS_EXPR, offset,
build_int_cst (TREE_TYPE (offset),
- off), 0);
+ off));
baseop = build_fold_addr_expr (base);
}
return fold_build2 (MEM_REF, currop->type, baseop, offset);
TYPE_SIZE (TREE_TYPE (*tp)),
int_const_binop (MULT_EXPR,
bitsize_int (BITS_PER_UNIT),
- TREE_OPERAND (*tp, 1), 0));
+ TREE_OPERAND (*tp, 1)));
}
else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
&& useless_type_conversion_p (TREE_TYPE (*tp),
gcc_assert (info.range_min);
gcc_assert (range_max);
- info.range_size = int_const_binop (MINUS_EXPR, range_max, info.range_min, 0);
+ info.range_size = int_const_binop (MINUS_EXPR, range_max, info.range_min);
gcc_assert (info.range_size);
if (!host_integerp (info.range_size, 1))
elt = VEC_quick_push (constructor_elt,
info.constructors[k], NULL);
elt->index = int_const_binop (MINUS_EXPR, pos,
- info.range_min, 0);
+ info.range_min);
elt->value = info.default_values[k];
}
- pos = int_const_binop (PLUS_EXPR, pos, integer_one_node, 0);
+ pos = int_const_binop (PLUS_EXPR, pos, integer_one_node);
}
gcc_assert (tree_int_cst_equal (pos, CASE_LOW (cs)));
elt = VEC_quick_push (constructor_elt,
info.constructors[j], NULL);
- elt->index = int_const_binop (MINUS_EXPR, pos, info.range_min, 0);
+ elt->index = int_const_binop (MINUS_EXPR, pos, info.range_min);
elt->value = val;
- pos = int_const_binop (PLUS_EXPR, pos, integer_one_node, 0);
+ pos = int_const_binop (PLUS_EXPR, pos, integer_one_node);
} while (!tree_int_cst_lt (high, pos)
&& tree_int_cst_lt (low, pos));
j++;
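
build_constructors above maps each case value to a constructor slot by
subtracting info.range_min, after filling any holes with the default value. A
toy model of that indexing (all names invented for illustration):

    #include <assert.h>

    int
    main (void)
    {
      int range_min = 10;
      int table[5];
      int i;

      /* Fill every slot with the default value first, ...  */
      for (i = 0; i < 5; i++)
        table[i] = -1;
      /* ... then place each explicit case at pos - range_min.  */
      table[12 - range_min] = 42;  /* case 12 */

      assert (table[2] == 42 && table[0] == -1);
      return 0;
    }
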
v = VEC_alloc(constructor_elt, gc, (nunits + delta - 1) / delta);
for (i = 0; i < nunits;
- i += delta, index = int_const_binop (PLUS_EXPR, index, part_width, 0))
+ i += delta, index = int_const_binop (PLUS_EXPR, index, part_width))
{
tree result = f (gsi, inner_type, a, b, index, part_width, code);
constructor_elt *ce = VEC_quick_push (constructor_elt, v, NULL);
if (tree_int_cst_lt (max, min))
{
tree one = build_int_cst (TREE_TYPE (min), 1);
- tree tmp = int_const_binop (PLUS_EXPR, max, one, 0);
- max = int_const_binop (MINUS_EXPR, min, one, 0);
+ tree tmp = int_const_binop (PLUS_EXPR, max, one);
+ max = int_const_binop (MINUS_EXPR, min, one);
min = tmp;
/* There's one corner case, if we had [C+1, C] before we now have
&& integer_zerop (max)))
{
tree one = build_int_cst (TREE_TYPE (max), 1);
- min = int_const_binop (PLUS_EXPR, max, one, 0);
+ min = int_const_binop (PLUS_EXPR, max, one);
max = vrp_val_max (TREE_TYPE (max));
t = VR_RANGE;
}
else if (is_max)
{
tree one = build_int_cst (TREE_TYPE (min), 1);
- max = int_const_binop (MINUS_EXPR, min, one, 0);
+ max = int_const_binop (MINUS_EXPR, min, one);
min = vrp_val_min (TREE_TYPE (min));
t = VR_RANGE;
}
{
min = fold_build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (cond, 1)),
TREE_OPERAND (cond, 1));
- max = int_const_binop (PLUS_EXPR, limit, min, 0);
+ max = int_const_binop (PLUS_EXPR, limit, min);
cond = TREE_OPERAND (cond, 0);
}
else
{
tree res;
- res = int_const_binop (code, val1, val2, 0);
+ res = int_const_binop (code, val1, val2);
/* If we are using unsigned arithmetic, operate symbolically
on -INF and +INF as int_const_binop only handles signed overflow. */
{
tree tmp = int_const_binop (TRUNC_DIV_EXPR,
res,
- val1, 0);
+ val1);
int check = compare_values (tmp, val2);
if (check != 0)
max = fold_unary_to_constant (ABS_EXPR, TREE_TYPE (vr1.min), vr1.min);
if (tree_int_cst_lt (max, vr1.max))
max = vr1.max;
- max = int_const_binop (MINUS_EXPR, max, integer_one_node, 0);
+ max = int_const_binop (MINUS_EXPR, max, integer_one_node);
/* If the dividend is non-negative the modulus will be
non-negative as well. */
if (TYPE_UNSIGNED (TREE_TYPE (max))
type = VR_RANGE;
if (vr0_int_cst_singleton_p && vr1_int_cst_singleton_p)
- min = max = int_const_binop (code, vr0.max, vr1.max, 0);
+ min = max = int_const_binop (code, vr0.max, vr1.max);
else if (!int_cst_range0 && !int_cst_range1)
{
set_value_range_to_varying (vr);
&& (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
|| (vr0.type == VR_RANGE
&& integer_zerop (int_const_binop (RSHIFT_EXPR,
- int_const_binop (MINUS_EXPR, vr0.max, vr0.min, 0),
- size_int (TYPE_PRECISION (outer_type)), 0)))))
+ int_const_binop (MINUS_EXPR, vr0.max, vr0.min),
+ size_int (TYPE_PRECISION (outer_type)))))))
{
tree new_min, new_max;
new_min = force_fit_type_double (outer_type,
min = (vr0.min != type_min_value
? int_const_binop (PLUS_EXPR, type_min_value,
- integer_one_node, 0)
+ integer_one_node)
: type_min_value);
}
else
}
low_bound = array_ref_low_bound (ref);
- up_bound_p1 = int_const_binop (PLUS_EXPR, up_bound, integer_one_node, 0);
+ up_bound_p1 = int_const_binop (PLUS_EXPR, up_bound, integer_one_node);
if (TREE_CODE (low_sub) == SSA_NAME)
{
for (k = i + 1; k <= j; ++k)
{
low = CASE_LOW (gimple_switch_label (stmt, k));
- if (!integer_onep (int_const_binop (MINUS_EXPR, low, high, 0)))
+ if (!integer_onep (int_const_binop (MINUS_EXPR, low, high)))
{
take_default = true;
break;
else
{
t = build_int_cst (TREE_TYPE (op1), 1);
- t = int_const_binop (MINUS_EXPR, op1, t, 0);
+ t = int_const_binop (MINUS_EXPR, op1, t);
t = fold_convert (TREE_TYPE (op0), t);
gimple_assign_set_rhs_code (stmt, BIT_AND_EXPR);
extern tree fold_unary_to_constant (enum tree_code, tree, tree);
extern tree fold_binary_to_constant (enum tree_code, tree, tree, tree);
extern tree fold_read_from_constant_string (tree);
-extern tree int_const_binop (enum tree_code, const_tree, const_tree, int);
+extern tree int_const_binop (enum tree_code, const_tree, const_tree);
#define build_fold_addr_expr(T)\
build_fold_addr_expr_loc (UNKNOWN_LOCATION, (T))
extern tree build_fold_addr_expr_loc (location_t, tree);