/* Fold a constant sub-tree into a single node for C-compiler
- Copyright (C) 1987-2013 Free Software Foundation, Inc.
+ Copyright (C) 1987-2014 Free Software Foundation, Inc.
This file is part of GCC.
#include "tm.h"
#include "flags.h"
#include "tree.h"
+#include "stor-layout.h"
+#include "calls.h"
+#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
-#include "ggc.h"
-#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
+#include "predict.h"
+#include "vec.h"
+#include "hashtab.h"
+#include "hash-set.h"
+#include "machmode.h"
+#include "hard-reg-set.h"
+#include "input.h"
+#include "function.h"
+#include "basic-block.h"
+#include "tree-ssa-alias.h"
+#include "internal-fn.h"
+#include "tree-eh.h"
+#include "gimple-expr.h"
+#include "is-a.h"
#include "gimple.h"
-#include "tree-ssa.h"
+#include "gimplify.h"
+#include "tree-dfa.h"
+#include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
+#include "builtins.h"
+#include "hash-map.h"
+#include "plugin-api.h"
+#include "ipa-ref.h"
+#include "cgraph.h"
+#include "generic-match.h"
+#include "optabs.h"
/* Nonzero if we are folding constants inside an initializer; zero
otherwise. */
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
-static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
HOST_WIDE_INT, HOST_WIDE_INT, int);
tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
HOST_WIDE_INT *,
- enum machine_mode *, int *, int *,
+ machine_mode *, int *, int *,
tree *, tree *);
-static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
return x;
}
\f
-/* If ARG2 divides ARG1 with zero remainder, carries out the division
- of type CODE and returns the quotient.
- Otherwise returns NULL_TREE. */
+/* If ARG2 divides ARG1 with zero remainder, carries out the exact
+ division and returns the quotient. Otherwise returns
+ NULL_TREE. */
tree
-div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
+div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
- double_int quo, rem;
- int uns;
+ widest_int quo;
- /* The sign of the division is according to operand two, that
- does the correct thing for POINTER_PLUS_EXPR where we want
- a signed division. */
- uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
-
- quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
- uns, code, &rem);
-
- if (rem.is_zero ())
- return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
+ if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
+ SIGNED, &quo))
+ return wide_int_to_tree (TREE_TYPE (arg1), quo);
return NULL_TREE;
}
bool
may_negate_without_overflow_p (const_tree t)
{
- unsigned HOST_WIDE_INT val;
- unsigned int prec;
tree type;
gcc_assert (TREE_CODE (t) == INTEGER_CST);
if (TYPE_UNSIGNED (type))
return false;
- prec = TYPE_PRECISION (type);
- if (prec > HOST_BITS_PER_WIDE_INT)
- {
- if (TREE_INT_CST_LOW (t) != 0)
- return true;
- prec -= HOST_BITS_PER_WIDE_INT;
- val = TREE_INT_CST_HIGH (t);
- }
- else
- val = TREE_INT_CST_LOW (t);
- if (prec < HOST_BITS_PER_WIDE_INT)
- val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
- return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
+ return !wi::only_sign_bit_p (t);
}
/* Determine whether an expression T can be cheaply negated using
case TRUNC_DIV_EXPR:
case ROUND_DIV_EXPR:
- case FLOOR_DIV_EXPR:
- case CEIL_DIV_EXPR:
case EXACT_DIV_EXPR:
/* In general we can't negate A / B, because if A is INT_MIN and
B is 1, we may turn this into INT_MIN / -1 which is undefined
break;
case RSHIFT_EXPR:
- /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
+ /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
{
tree op1 = TREE_OPERAND (t, 1);
- if (TREE_INT_CST_HIGH (op1) == 0
- && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
- == TREE_INT_CST_LOW (op1))
+ if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
return true;
}
break;
case TRUNC_DIV_EXPR:
case ROUND_DIV_EXPR:
- case FLOOR_DIV_EXPR:
- case CEIL_DIV_EXPR:
case EXACT_DIV_EXPR:
/* In general we can't negate A / B, because if A is INT_MIN and
B is 1, we may turn this into INT_MIN / -1 which is undefined
break;
case RSHIFT_EXPR:
- /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
+ /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
{
tree op1 = TREE_OPERAND (t, 1);
- if (TREE_INT_CST_HIGH (op1) == 0
- && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
- == TREE_INT_CST_LOW (op1))
+ if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
{
tree ntype = TYPE_UNSIGNED (type)
? signed_type_for (type)
to evaluate CODE at compile-time. */
static tree
-int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
+int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
int overflowable)
{
- double_int op1, op2, res, tmp;
+ wide_int res;
tree t;
tree type = TREE_TYPE (arg1);
- bool uns = TYPE_UNSIGNED (type);
+ signop sign = TYPE_SIGN (type);
bool overflow = false;
- op1 = tree_to_double_int (arg1);
- op2 = tree_to_double_int (arg2);
+ wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
+ TYPE_SIGN (TREE_TYPE (parg2)));
switch (code)
{
case BIT_IOR_EXPR:
- res = op1 | op2;
+ res = wi::bit_or (arg1, arg2);
break;
case BIT_XOR_EXPR:
- res = op1 ^ op2;
+ res = wi::bit_xor (arg1, arg2);
break;
case BIT_AND_EXPR:
- res = op1 & op2;
+ res = wi::bit_and (arg1, arg2);
break;
case RSHIFT_EXPR:
- res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
- break;
-
case LSHIFT_EXPR:
- /* It's unclear from the C standard whether shifts can overflow.
- The following code ignores overflow; perhaps a C standard
- interpretation ruling is needed. */
- res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
- break;
+ if (wi::neg_p (arg2))
+ {
+ arg2 = -arg2;
+ if (code == RSHIFT_EXPR)
+ code = LSHIFT_EXPR;
+ else
+ code = RSHIFT_EXPR;
+ }
- case RROTATE_EXPR:
- res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
+ if (code == RSHIFT_EXPR)
+ /* It's unclear from the C standard whether shifts can overflow.
+ The following code ignores overflow; perhaps a C standard
+ interpretation ruling is needed. */
+ res = wi::rshift (arg1, arg2, sign);
+ else
+ res = wi::lshift (arg1, arg2);
break;
+ case RROTATE_EXPR:
case LROTATE_EXPR:
- res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
+ if (wi::neg_p (arg2))
+ {
+ arg2 = -arg2;
+ if (code == RROTATE_EXPR)
+ code = LROTATE_EXPR;
+ else
+ code = RROTATE_EXPR;
+ }
+
+ if (code == RROTATE_EXPR)
+ res = wi::rrotate (arg1, arg2);
+ else
+ res = wi::lrotate (arg1, arg2);
break;
case PLUS_EXPR:
- res = op1.add_with_sign (op2, false, &overflow);
+ res = wi::add (arg1, arg2, sign, &overflow);
break;
case MINUS_EXPR:
- res = op1.sub_with_overflow (op2, &overflow);
+ res = wi::sub (arg1, arg2, sign, &overflow);
break;
case MULT_EXPR:
- res = op1.mul_with_sign (op2, false, &overflow);
+ res = wi::mul (arg1, arg2, sign, &overflow);
break;
case MULT_HIGHPART_EXPR:
- if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
- {
- bool dummy_overflow;
- if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
- return NULL_TREE;
- op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
- }
- else
- {
- bool dummy_overflow;
- /* MULT_HIGHPART_EXPR can't ever oveflow, as the multiplication
- is performed in twice the precision of arguments. */
- tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
- res = tmp.rshift (TYPE_PRECISION (type),
- 2 * TYPE_PRECISION (type), !uns);
- }
+ res = wi::mul_high (arg1, arg2, sign);
break;
case TRUNC_DIV_EXPR:
- case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
case EXACT_DIV_EXPR:
- /* This is a shortcut for a common special case. */
- if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
- && !TREE_OVERFLOW (arg1)
- && !TREE_OVERFLOW (arg2)
- && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
- {
- if (code == CEIL_DIV_EXPR)
- op1.low += op2.low - 1;
+ if (arg2 == 0)
+ return NULL_TREE;
+ res = wi::div_trunc (arg1, arg2, sign, &overflow);
+ break;
- res.low = op1.low / op2.low, res.high = 0;
- break;
- }
+ case FLOOR_DIV_EXPR:
+ if (arg2 == 0)
+ return NULL_TREE;
+ res = wi::div_floor (arg1, arg2, sign, &overflow);
+ break;
- /* ... fall through ... */
+ case CEIL_DIV_EXPR:
+ if (arg2 == 0)
+ return NULL_TREE;
+ res = wi::div_ceil (arg1, arg2, sign, &overflow);
+ break;
case ROUND_DIV_EXPR:
- if (op2.is_zero ())
+ if (arg2 == 0)
return NULL_TREE;
- if (op2.is_one ())
- {
- res = op1;
- break;
- }
- if (op1 == op2 && !op1.is_zero ())
- {
- res = double_int_one;
- break;
- }
- res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
+ res = wi::div_round (arg1, arg2, sign, &overflow);
break;
case TRUNC_MOD_EXPR:
- case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
- /* This is a shortcut for a common special case. */
- if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
- && !TREE_OVERFLOW (arg1)
- && !TREE_OVERFLOW (arg2)
- && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
- {
- if (code == CEIL_MOD_EXPR)
- op1.low += op2.low - 1;
- res.low = op1.low % op2.low, res.high = 0;
- break;
- }
+ if (arg2 == 0)
+ return NULL_TREE;
+ res = wi::mod_trunc (arg1, arg2, sign, &overflow);
+ break;
- /* ... fall through ... */
+ case FLOOR_MOD_EXPR:
+ if (arg2 == 0)
+ return NULL_TREE;
+ res = wi::mod_floor (arg1, arg2, sign, &overflow);
+ break;
+
+ case CEIL_MOD_EXPR:
+ if (arg2 == 0)
+ return NULL_TREE;
+ res = wi::mod_ceil (arg1, arg2, sign, &overflow);
+ break;
case ROUND_MOD_EXPR:
- if (op2.is_zero ())
+ if (arg2 == 0)
return NULL_TREE;
- tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
+ res = wi::mod_round (arg1, arg2, sign, &overflow);
break;
case MIN_EXPR:
- res = op1.min (op2, uns);
+ res = wi::min (arg1, arg2, sign);
break;
case MAX_EXPR:
- res = op1.max (op2, uns);
+ res = wi::max (arg1, arg2, sign);
break;
default:
return NULL_TREE;
}
- t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
- (!uns && overflow)
- | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
+ t = force_fit_type (type, res, overflowable,
+ (((sign == SIGNED || overflowable == -1)
+ && overflow)
+ | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
return t;
}
if (TREE_CODE (arg1) == REAL_CST)
{
- enum machine_mode mode;
+ machine_mode mode;
REAL_VALUE_TYPE d1;
REAL_VALUE_TYPE d2;
REAL_VALUE_TYPE value;
case LSHIFT_EXPR:
case RSHIFT_EXPR:
- f2.data.high = TREE_INT_CST_HIGH (arg2);
- f2.data.low = TREE_INT_CST_LOW (arg2);
- f2.mode = SImode;
+ {
+ wide_int w2 = arg2;
+ f2.data.high = w2.elt (1);
+ f2.data.low = w2.elt (0);
+ f2.mode = SImode;
+ }
break;
default:
int count = TYPE_VECTOR_SUBPARTS (type), i;
tree *elts = XALLOCAVEC (tree, count);
- if (code == VEC_LSHIFT_EXPR
- || code == VEC_RSHIFT_EXPR)
+ if (code == VEC_RSHIFT_EXPR)
{
- if (!host_integerp (arg2, 1))
+ if (!tree_fits_uhwi_p (arg2))
return NULL_TREE;
- unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
- unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
+ unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
+ unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
unsigned HOST_WIDE_INT innerc
- = tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
+ = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
if (shiftc >= outerc || (shiftc % innerc) != 0)
return NULL_TREE;
int offset = shiftc / innerc;
- /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
- For reductions, compiler emits VEC_RSHIFT_EXPR always,
- for !BYTES_BIG_ENDIAN picks first vector element, but
- for BYTES_BIG_ENDIAN last element from the vector. */
- if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
+ /* The direction of VEC_RSHIFT_EXPR is endian dependent.
+ For reductions, if !BYTES_BIG_ENDIAN then the compiler picks the first
+ vector element, but the last element if BYTES_BIG_ENDIAN. */
+ if (BYTES_BIG_ENDIAN)
offset = -offset;
tree zero = build_zero_cst (TREE_TYPE (type));
for (i = 0; i < count; i++)
static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
- tree t;
-
/* Given an integer constant, make new constant with new type,
- appropriately sign-extended or truncated. */
- t = force_fit_type_double (type, tree_to_double_int (arg1),
- !POINTER_TYPE_P (TREE_TYPE (arg1)),
- (TREE_INT_CST_HIGH (arg1) < 0
- && (TYPE_UNSIGNED (type)
- < TYPE_UNSIGNED (TREE_TYPE (arg1))))
- | TREE_OVERFLOW (arg1));
-
- return t;
+ appropriately sign-extended or truncated. Use widest_int
+ so that any extension is done according to ARG1's type. */
+ return force_fit_type (type, wi::to_widest (arg1),
+ !POINTER_TYPE_P (TREE_TYPE (arg1)),
+ TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
- int overflow = 0;
+ bool overflow = false;
tree t;
/* The following code implements the floating point to integer
C and C++ standards that simply state that the behavior of
FP-to-integer conversion is unspecified upon overflow. */
- double_int val;
+ wide_int val;
REAL_VALUE_TYPE r;
REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* If R is NaN, return zero and show we have an overflow. */
if (REAL_VALUE_ISNAN (r))
{
- overflow = 1;
- val = double_int_zero;
+ overflow = true;
+ val = wi::zero (TYPE_PRECISION (type));
}
/* See if R is less than the lower bound or greater than the
REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
if (REAL_VALUES_LESS (r, l))
{
- overflow = 1;
- val = tree_to_double_int (lt);
+ overflow = true;
+ val = lt;
}
}
REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
if (REAL_VALUES_LESS (u, r))
{
- overflow = 1;
- val = tree_to_double_int (ut);
+ overflow = true;
+ val = ut;
}
}
}
if (! overflow)
- real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
+ val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
- t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
+ t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
return t;
}
/* Given a fixed-point constant, make new constant with new type,
appropriately sign-extended or truncated. */
- t = force_fit_type_double (type, temp, -1,
- (temp.is_negative ()
- && (TYPE_UNSIGNED (type)
- < TYPE_UNSIGNED (TREE_TYPE (arg1))))
- | TREE_OVERFLOW (arg1));
+ t = force_fit_type (type, temp, -1,
+ (temp.is_negative ()
+ && (TYPE_UNSIGNED (type)
+ < TYPE_UNSIGNED (TREE_TYPE (arg1))))
+ | TREE_OVERFLOW (arg1));
return t;
}
FIXED_VALUE_TYPE value;
tree t;
bool overflow_p;
+ double_int di;
+
+ gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
- overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
- TREE_INT_CST (arg1),
+ di.low = TREE_INT_CST_ELT (arg1, 0);
+ if (TREE_INT_CST_NUNITS (arg1) == 1)
+ di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
+ else
+ di.high = TREE_INT_CST_ELT (arg1, 1);
+
+ overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
TYPE_UNSIGNED (TREE_TYPE (arg1)),
TYPE_SATURATING (type));
t = build_fixed (type, value);
return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
-/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
- Zero means allow extended lvalues. */
-
-int pedantic_lvalues;
-
/* When pedantic, return an expr equal to X but certainly not valid as a
pedantic lvalue. Otherwise, return X. */
static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
- if (pedantic_lvalues)
- return non_lvalue_loc (loc, x);
-
return protected_set_expr_location_unshare (x, loc);
}
\f
case COMPONENT_REF:
/* Handle operand 2 the same as for ARRAY_REF. Operand 0
may be NULL when we're called to compare MEM_EXPRs. */
- if (!OP_SAME_WITH_NULL (0))
+ if (!OP_SAME_WITH_NULL (0)
+ || !OP_SAME (1))
return 0;
flags &= ~OEP_CONSTANT_ADDRESS_OF;
- return OP_SAME (1) && OP_SAME_WITH_NULL (2);
+ return OP_SAME_WITH_NULL (2);
case BIT_FIELD_REF:
if (!OP_SAME (0))
return non_lvalue_loc (loc, t);
}
-/* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
-
-static tree
-pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
- tree omitted)
-{
- tree t = fold_convert_loc (loc, type, result);
-
- /* If the resulting operand is an empty statement, just return the omitted
- statement casted to void. */
- if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
- return build1_loc (loc, NOP_EXPR, void_type_node,
- fold_ignored_result (omitted));
-
- if (TREE_SIDE_EFFECTS (omitted))
- return build2_loc (loc, COMPOUND_EXPR, type,
- fold_ignored_result (omitted), t);
-
- return pedantic_non_lvalue_loc (loc, t);
-}
-
/* Return a tree for the case when the result of an expression is RESULT
converted to TYPE and OMITTED1 and OMITTED2 were previously operands
of the expression but are now not needed.
tree size = TYPE_SIZE (TREE_TYPE (inner));
if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
|| POINTER_TYPE_P (TREE_TYPE (inner)))
- && host_integerp (size, 0)
- && tree_low_cst (size, 0) == bitsize)
+ && tree_fits_shwi_p (size)
+ && tree_to_shwi (size) == bitsize)
return fold_convert_loc (loc, type, inner);
}
{
HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
tree type = TREE_TYPE (lhs);
- tree signed_type, unsigned_type;
+ tree unsigned_type;
int const_p = TREE_CODE (rhs) == INTEGER_CST;
- enum machine_mode lmode, rmode, nmode;
+ machine_mode lmode, rmode, nmode;
int lunsignedp, runsignedp;
int lvolatilep = 0, rvolatilep = 0;
tree linner, rinner = NULL_TREE;
tree mask;
tree offset;
- /* In the strict volatile bitfields case, doing code changes here may prevent
- other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
- if (flag_strict_volatile_bitfields > 0)
- return 0;
-
/* Get all the information about the extractions being done. If the bit size
if the same as the size of the underlying object, we aren't doing an
extraction at all and so can do nothing. We also don't want to
linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
&lunsignedp, &lvolatilep, false);
if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
- || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
+ || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
return 0;
if (!const_p)
if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
|| lunsignedp != runsignedp || offset != 0
- || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
+ || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
return 0;
}
/* See if we can find a mode to refer to this field. We should be able to,
but fail if we can't. */
- if (lvolatilep
- && GET_MODE_BITSIZE (lmode) > 0
- && flag_strict_volatile_bitfields > 0)
- nmode = lmode;
- else
- nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
- const_p ? TYPE_ALIGN (TREE_TYPE (linner))
- : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
- TYPE_ALIGN (TREE_TYPE (rinner))),
- word_mode, lvolatilep || rvolatilep);
+ nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
+ const_p ? TYPE_ALIGN (TREE_TYPE (linner))
+ : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
+ TYPE_ALIGN (TREE_TYPE (rinner))),
+ word_mode, false);
if (nmode == VOIDmode)
return 0;
/* Set signed and unsigned types of the precision of this mode for the
shifts below. */
- signed_type = lang_hooks.types.type_for_mode (nmode, 0);
unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
/* Compute the bit position and size for the new reference and our offset
if (lunsignedp)
{
- if (! integer_zerop (const_binop (RSHIFT_EXPR,
- fold_convert_loc (loc,
- unsigned_type, rhs),
- size_int (lbitsize))))
+ if (wi::lrshift (rhs, lbitsize) != 0)
{
warning (0, "comparison is always %d due to width of bit-field",
code == NE_EXPR);
}
else
{
- tree tem = const_binop (RSHIFT_EXPR,
- fold_convert_loc (loc, signed_type, rhs),
- size_int (lbitsize - 1));
- if (! integer_zerop (tem) && ! integer_all_onesp (tem))
+ wide_int tem = wi::arshift (rhs, lbitsize - 1);
+ if (tem != 0 && tem != -1)
{
warning (0, "comparison is always %d due to width of bit-field",
code == NE_EXPR);
appropriate number of bits and mask it with the computed mask
(in case this was a signed field). If we changed it, make a new one. */
lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
- if (lvolatilep)
- {
- TREE_SIDE_EFFECTS (lhs) = 1;
- TREE_THIS_VOLATILE (lhs) = 1;
- }
rhs = const_binop (BIT_AND_EXPR,
const_binop (LSHIFT_EXPR,
static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
- HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
+ HOST_WIDE_INT *pbitpos, machine_mode *pmode,
int *punsignedp, int *pvolatilep,
tree *pmask, tree *pand_mask)
{
}
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
- bit positions. */
+ bit positions and the type of MASK is signed. */
static int
-all_ones_mask_p (const_tree mask, int size)
+all_ones_mask_p (const_tree mask, unsigned int size)
{
tree type = TREE_TYPE (mask);
unsigned int precision = TYPE_PRECISION (type);
- tree tmask;
- tmask = build_int_cst_type (signed_type_for (type), -1);
+ /* If this function returns true when the type of the mask is
+ UNSIGNED, then there will be errors. In particular see
+ gcc.c-torture/execute/990326-1.c. There does not appear to be
+ any documentation paper trail as to why this is so. But the
+ pre-wide-int code worked with that restriction and it has been preserved
+ here. */
+ if (size > precision || TYPE_SIGN (type) == UNSIGNED)
+ return false;
- return
- tree_int_cst_equal (mask,
- const_binop (RSHIFT_EXPR,
- const_binop (LSHIFT_EXPR, tmask,
- size_int (precision - size)),
- size_int (precision - size)));
+ return wi::mask (size, false, precision) == mask;
}
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
static tree
sign_bit_p (tree exp, const_tree val)
{
- unsigned HOST_WIDE_INT mask_lo, lo;
- HOST_WIDE_INT mask_hi, hi;
int width;
tree t;
return NULL_TREE;
width = TYPE_PRECISION (t);
- if (width > HOST_BITS_PER_WIDE_INT)
- {
- hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
- lo = 0;
-
- mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
- mask_lo = -1;
- }
- else
- {
- hi = 0;
- lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
-
- mask_hi = 0;
- mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
- }
-
- /* We mask off those bits beyond TREE_TYPE (exp) so that we can
- treat VAL as if it were unsigned. */
- if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
- && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
+ if (wi::only_sign_bit_p (val, width))
return exp;
/* Handle extension from a narrower type. */
{
in_p = ! in_p;
high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
- integer_one_node, 0);
+ build_int_cst (TREE_TYPE (low), 1), 0);
low = build_int_cst (arg0_type, 0);
}
}
if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
{
low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
- integer_one_node, 0);
+ build_int_cst (TREE_TYPE (n_high), 1), 0);
high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
- integer_one_node, 0);
+ build_int_cst (TREE_TYPE (n_low), 1), 0);
/* If the range is of the form +/- [ x+1, x ], we won't
be able to normalize it. But then, it represents the
/* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
{
- unsigned HOST_WIDE_INT lo;
- HOST_WIDE_INT hi;
- int prec;
-
- prec = TYPE_PRECISION (etype);
- if (prec <= HOST_BITS_PER_WIDE_INT)
- {
- hi = 0;
- lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
- }
- else
- {
- hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
- lo = HOST_WIDE_INT_M1U;
- }
+ int prec = TYPE_PRECISION (etype);
- if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
+ if (wi::mask (prec - 1, false, prec) == high)
{
if (TYPE_UNSIGNED (etype))
{
utype = unsigned_type_for (etype);
maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
- integer_one_node, 1);
+ build_int_cst (TREE_TYPE (maxv), 1), 1);
minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
&& operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
return 0;
else
- return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
+ return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
+ build_int_cst (TREE_TYPE (val), 1), 0);
}
/* Return the successor of VAL in its type, handling the infinite case. */
&& operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
return 0;
else
- return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
+ return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
+ build_int_cst (TREE_TYPE (val), 1), 0);
}
/* Given two ranges, see if we can merge them into one. Return 1 if we
if (TYPE_UNSIGNED (TREE_TYPE (high1))
&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
high1, 1,
- integer_one_node, 1)))
+ build_int_cst (TREE_TYPE (high1), 1),
+ 1)))
high1 = 0;
break;
default:
int in0_p, in1_p, in_p;
tree low0, low1, low, high0, high1, high;
bool strict_overflow_p = false;
- tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
- tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
- tree tem;
+ tree tem, lhs, rhs;
const char * const warnmsg = G_("assuming signed overflow does not occur "
"when simplifying range test");
+ if (!INTEGRAL_TYPE_P (type))
+ return 0;
+
+ lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
+ rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
+
/* If this is an OR operation, invert both sides; we will invert
again at the end. */
if (or_op)
/* We work by getting just the sign bit into the low-order bit, then
into the high-order bit, then sign-extend. We then XOR that value
with C. */
- temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
- temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
+ temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
/* We must use a signed type in order to get an arithmetic right shift.
However, we must also avoid introducing accidental overflows, so that
HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
- enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
- enum machine_mode lnmode, rnmode;
+ machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
+ machine_mode lnmode, rnmode;
tree ll_mask, lr_mask, rl_mask, rr_mask;
tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
tree l_const, r_const;
/* For a constant, we can always simplify if we are a multiply
or (for divide and modulus) if it is a multiple of our constant. */
if (code == MULT_EXPR
- || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
+ || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
return const_binop (code, fold_convert (ctype, t),
fold_convert (ctype, c));
break;
&& (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
/* const_binop may not detect overflow correctly,
so check for it explicitly here. */
- && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
- && TREE_INT_CST_HIGH (op1) == 0
+ && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
&& 0 != (t1 = fold_convert (ctype,
const_binop (LSHIFT_EXPR,
size_one_node,
/* If it's a multiply or a division/modulus operation of a multiple
of our constant, do the operation and verify it doesn't overflow. */
if (code == MULT_EXPR
- || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
+ || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
{
op1 = const_binop (code, fold_convert (ctype, op1),
fold_convert (ctype, c));
/* If the multiplication can overflow we cannot optimize this. */
&& TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
&& TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
- && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
+ && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
{
*strict_overflow_p = true;
return omit_one_operand (type, integer_zero_node, op0);
assuming no overflow. */
if (tcode == code)
{
- double_int mul;
- bool overflow_p;
- unsigned prec = TYPE_PRECISION (ctype);
- bool uns = TYPE_UNSIGNED (ctype);
- double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
- double_int dic = tree_to_double_int (c).ext (prec, uns);
- mul = diop1.mul_with_sign (dic, false, &overflow_p);
- overflow_p = ((!uns && overflow_p)
- | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
- if (!double_int_fits_to_tree_p (ctype, mul)
- && ((uns && tcode != MULT_EXPR) || !uns))
- overflow_p = 1;
+ bool overflow_p = false;
+ bool overflow_mul_p;
+ signop sign = TYPE_SIGN (ctype);
+ wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
+ overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
+ if (overflow_mul_p
+ && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
+ overflow_p = true;
if (!overflow_p)
return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
- double_int_to_tree (ctype, mul));
+ wide_int_to_tree (ctype, mul));
}
/* If these operations "cancel" each other, we have the main
&& code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
&& code != MULT_EXPR)))
{
- if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
+ if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
{
if (TYPE_OVERFLOW_UNDEFINED (ctype))
*strict_overflow_p = true;
const_binop (TRUNC_DIV_EXPR,
op1, c)));
}
- else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
+ else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
{
if (TYPE_OVERFLOW_UNDEFINED (ctype))
*strict_overflow_p = true;
if (BUILTIN_SQRT_P (fcode))
{
tree arg = CALL_EXPR_ARG (arg0, 0);
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
+ machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
c = TREE_REAL_CST (arg1);
if (REAL_VALUE_NEGATIVE (c))
fold_inf_compare (location_t loc, enum tree_code code, tree type,
tree arg0, tree arg1)
{
- enum machine_mode mode;
+ machine_mode mode;
REAL_VALUE_TYPE max;
tree temp;
bool neg;
tree prod, tmp, hi, lo;
tree arg00 = TREE_OPERAND (arg0, 0);
tree arg01 = TREE_OPERAND (arg0, 1);
- double_int val;
- bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
- bool neg_overflow;
+ signop sign = TYPE_SIGN (TREE_TYPE (arg0));
+ bool neg_overflow = false;
bool overflow;
/* We have to do this the hard way to detect unsigned overflow.
prod = int_const_binop (MULT_EXPR, arg01, arg1); */
- val = TREE_INT_CST (arg01)
- .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
- prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
+ wide_int val = wi::mul (arg01, arg1, sign, &overflow);
+ prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
neg_overflow = false;
- if (unsigned_p)
+ if (sign == UNSIGNED)
{
tmp = int_const_binop (MINUS_EXPR, arg01,
build_int_cst (TREE_TYPE (arg01), 1));
lo = prod;
/* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
- val = TREE_INT_CST (prod)
- .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
- hi = force_fit_type_double (TREE_TYPE (arg00), val,
- -1, overflow | TREE_OVERFLOW (prod));
+ val = wi::add (prod, tmp, sign, &overflow);
+ hi = force_fit_type (TREE_TYPE (arg00), val,
+ -1, overflow | TREE_OVERFLOW (prod));
}
else if (tree_int_cst_sgn (arg01) >= 0)
{
/* This is only a win if casting to a signed type is cheap,
i.e. when arg00's type is not a partial mode. */
&& TYPE_PRECISION (TREE_TYPE (arg00))
- == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
+ == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
{
tree stype = signed_type_for (TREE_TYPE (arg00));
return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
tree inner = TREE_OPERAND (arg0, 0);
tree type = TREE_TYPE (arg0);
int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
- enum machine_mode operand_mode = TYPE_MODE (type);
+ machine_mode operand_mode = TYPE_MODE (type);
int ops_unsigned;
tree signed_type, unsigned_type, intermediate_type;
tree tem, one;
not overflow, adjust BITNUM and INNER. */
if (TREE_CODE (inner) == RSHIFT_EXPR
&& TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
- && host_integerp (TREE_OPERAND (inner, 1), 1)
&& bitnum < TYPE_PRECISION (type)
- && (TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
- < (unsigned) (TYPE_PRECISION (type) - bitnum)))
+ && wi::ltu_p (TREE_OPERAND (inner, 1),
+ TYPE_PRECISION (type) - bitnum))
{
- bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
+ bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
inner = TREE_OPERAND (inner, 0);
}
bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
- STRIP_SIGN_NOPS (arg0);
- STRIP_SIGN_NOPS (arg1);
-
- if (TREE_CODE (arg1) == INTEGER_CST)
- return 0;
- if (TREE_CODE (arg0) == INTEGER_CST)
- return 1;
-
- if (TREE_CODE (arg1) == REAL_CST)
- return 0;
- if (TREE_CODE (arg0) == REAL_CST)
- return 1;
-
- if (TREE_CODE (arg1) == FIXED_CST)
+ if (CONSTANT_CLASS_P (arg1))
return 0;
- if (TREE_CODE (arg0) == FIXED_CST)
+ if (CONSTANT_CLASS_P (arg0))
return 1;
- if (TREE_CODE (arg1) == COMPLEX_CST)
- return 0;
- if (TREE_CODE (arg0) == COMPLEX_CST)
- return 1;
+ STRIP_SIGN_NOPS (arg0);
+ STRIP_SIGN_NOPS (arg1);
if (TREE_CONSTANT (arg1))
return 0;
if (TREE_CONSTANT (arg0))
return 1;
- if (optimize_function_for_size_p (cfun))
- return 0;
-
if (reorder && flag_evaluation_order
&& (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
return 0;
return NULL_TREE;
if (TREE_CODE (arg1) == INTEGER_CST)
- arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
- 0, TREE_OVERFLOW (arg1));
+ arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
+ TREE_OVERFLOW (arg1));
else
arg1 = fold_convert_loc (loc, inner_type, arg1);
return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}
-/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
- step of the array. Reconstructs s and delta in the case of s *
- delta being an integer constant (and thus already folded). ADDR is
- the address. MULT is the multiplicative expression. If the
- function succeeds, the new address expression is returned.
- Otherwise NULL_TREE is returned. LOC is the location of the
- resulting expression. */
-
-static tree
-try_move_mult_to_index (location_t loc, tree addr, tree op1)
-{
- tree s, delta, step;
- tree ref = TREE_OPERAND (addr, 0), pref;
- tree ret, pos;
- tree itype;
- bool mdim = false;
-
- /* Strip the nops that might be added when converting op1 to sizetype. */
- STRIP_NOPS (op1);
-
- /* Canonicalize op1 into a possibly non-constant delta
- and an INTEGER_CST s. */
- if (TREE_CODE (op1) == MULT_EXPR)
- {
- tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
-
- STRIP_NOPS (arg0);
- STRIP_NOPS (arg1);
-
- if (TREE_CODE (arg0) == INTEGER_CST)
- {
- s = arg0;
- delta = arg1;
- }
- else if (TREE_CODE (arg1) == INTEGER_CST)
- {
- s = arg1;
- delta = arg0;
- }
- else
- return NULL_TREE;
- }
- else if (TREE_CODE (op1) == INTEGER_CST)
- {
- delta = op1;
- s = NULL_TREE;
- }
- else
- {
- /* Simulate we are delta * 1. */
- delta = op1;
- s = integer_one_node;
- }
-
- /* Handle &x.array the same as we would handle &x.array[0]. */
- if (TREE_CODE (ref) == COMPONENT_REF
- && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
- {
- tree domain;
-
- /* Remember if this was a multi-dimensional array. */
- if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
- mdim = true;
-
- domain = TYPE_DOMAIN (TREE_TYPE (ref));
- if (! domain)
- goto cont;
- itype = TREE_TYPE (domain);
-
- step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
- if (TREE_CODE (step) != INTEGER_CST)
- goto cont;
-
- if (s)
- {
- if (! tree_int_cst_equal (step, s))
- goto cont;
- }
- else
- {
- /* Try if delta is a multiple of step. */
- tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
- if (! tmp)
- goto cont;
- delta = tmp;
- }
-
- /* Only fold here if we can verify we do not overflow one
- dimension of a multi-dimensional array. */
- if (mdim)
- {
- tree tmp;
-
- if (!TYPE_MIN_VALUE (domain)
- || !TYPE_MAX_VALUE (domain)
- || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
- goto cont;
-
- tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
- fold_convert_loc (loc, itype,
- TYPE_MIN_VALUE (domain)),
- fold_convert_loc (loc, itype, delta));
- if (TREE_CODE (tmp) != INTEGER_CST
- || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
- goto cont;
- }
-
- /* We found a suitable component reference. */
-
- pref = TREE_OPERAND (addr, 0);
- ret = copy_node (pref);
- SET_EXPR_LOCATION (ret, loc);
-
- ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
- fold_build2_loc
- (loc, PLUS_EXPR, itype,
- fold_convert_loc (loc, itype,
- TYPE_MIN_VALUE
- (TYPE_DOMAIN (TREE_TYPE (ref)))),
- fold_convert_loc (loc, itype, delta)),
- NULL_TREE, NULL_TREE);
- return build_fold_addr_expr_loc (loc, ret);
- }
-
-cont:
-
- for (;; ref = TREE_OPERAND (ref, 0))
- {
- if (TREE_CODE (ref) == ARRAY_REF)
- {
- tree domain;
-
- /* Remember if this was a multi-dimensional array. */
- if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
- mdim = true;
-
- domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
- if (! domain)
- continue;
- itype = TREE_TYPE (domain);
-
- step = array_ref_element_size (ref);
- if (TREE_CODE (step) != INTEGER_CST)
- continue;
-
- if (s)
- {
- if (! tree_int_cst_equal (step, s))
- continue;
- }
- else
- {
- /* Try if delta is a multiple of step. */
- tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
- if (! tmp)
- continue;
- delta = tmp;
- }
-
- /* Only fold here if we can verify we do not overflow one
- dimension of a multi-dimensional array. */
- if (mdim)
- {
- tree tmp;
-
- if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
- || !TYPE_MAX_VALUE (domain)
- || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
- continue;
-
- tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
- fold_convert_loc (loc, itype,
- TREE_OPERAND (ref, 1)),
- fold_convert_loc (loc, itype, delta));
- if (!tmp
- || TREE_CODE (tmp) != INTEGER_CST
- || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
- continue;
- }
-
- break;
- }
- else
- mdim = false;
-
- if (!handled_component_p (ref))
- return NULL_TREE;
- }
-
- /* We found the suitable array reference. So copy everything up to it,
- and replace the index. */
-
- pref = TREE_OPERAND (addr, 0);
- ret = copy_node (pref);
- SET_EXPR_LOCATION (ret, loc);
- pos = ret;
-
- while (pref != ref)
- {
- pref = TREE_OPERAND (pref, 0);
- TREE_OPERAND (pos, 0) = copy_node (pref);
- pos = TREE_OPERAND (pos, 0);
- }
-
- TREE_OPERAND (pos, 1)
- = fold_build2_loc (loc, PLUS_EXPR, itype,
- fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
- fold_convert_loc (loc, itype, delta));
- return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
-}
-
/* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
means A >= Y && A != MAX, but in this case we know that
arg10 = build_one_cst (type);
/* As we canonicalize A - 2 to A + -2 get rid of that sign for
the purpose of this canonicalization. */
- if (TREE_INT_CST_HIGH (arg1) == -1
+ if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
&& negate_expr_p (arg1)
&& code == PLUS_EXPR)
{
/* No identical multiplicands; see if we can find a common
power-of-two factor in non-power-of-two multiplies. This
can help in multi-dimensional array access. */
- else if (host_integerp (arg01, 0)
- && host_integerp (arg11, 0))
+ else if (tree_fits_shwi_p (arg01)
+ && tree_fits_shwi_p (arg11))
{
HOST_WIDE_INT int01, int11, tmp;
bool swap = false;
tree maybe_same;
- int01 = TREE_INT_CST_LOW (arg01);
- int11 = TREE_INT_CST_LOW (arg11);
+ int01 = tree_to_shwi (arg01);
+ int11 = tree_to_shwi (arg11);
/* Move min of absolute values to int11. */
if (absu_hwi (int01) < absu_hwi (int11))
upon failure. */
static int
-native_encode_int (const_tree expr, unsigned char *ptr, int len)
+native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
{
tree type = TREE_TYPE (expr);
int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
int byte, offset, word, words;
unsigned char value;
- if (total_bytes > len)
+ if ((off == -1 && total_bytes > len)
+ || off >= total_bytes)
return 0;
+ if (off == -1)
+ off = 0;
words = total_bytes / UNITS_PER_WORD;
for (byte = 0; byte < total_bytes; byte++)
{
int bitpos = byte * BITS_PER_UNIT;
- if (bitpos < HOST_BITS_PER_WIDE_INT)
- value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
- else
- value = (unsigned char) (TREE_INT_CST_HIGH (expr)
- >> (bitpos - HOST_BITS_PER_WIDE_INT));
+ /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
+ number of bytes. */
+ value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
if (total_bytes > UNITS_PER_WORD)
{
}
else
offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
- ptr[offset] = value;
+ if (offset >= off
+ && offset - off < len)
+ ptr[offset - off] = value;
}
- return total_bytes;
+ return MIN (len, total_bytes - off);
}
upon failure. */
static int
-native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
+native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
{
tree type = TREE_TYPE (expr);
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
int total_bytes = GET_MODE_SIZE (mode);
FIXED_VALUE_TYPE value;
tree i_value, i_type;
value = TREE_FIXED_CST (expr);
i_value = double_int_to_tree (i_type, value.data);
- return native_encode_int (i_value, ptr, len);
+ return native_encode_int (i_value, ptr, len, off);
}
upon failure. */
static int
-native_encode_real (const_tree expr, unsigned char *ptr, int len)
+native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
{
tree type = TREE_TYPE (expr);
int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
up to 192 bits. */
long tmp[6];
- if (total_bytes > len)
+ if ((off == -1 && total_bytes > len)
+ || off >= total_bytes)
return 0;
+ if (off == -1)
+ off = 0;
words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
}
else
offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
- ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
+ offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
+ if (offset >= off
+ && offset - off < len)
+ ptr[offset - off] = value;
}
- return total_bytes;
+ return MIN (len, total_bytes - off);
}
/* Subroutine of native_encode_expr. Encode the COMPLEX_CST
upon failure. */
static int
-native_encode_complex (const_tree expr, unsigned char *ptr, int len)
+native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
{
int rsize, isize;
tree part;
part = TREE_REALPART (expr);
- rsize = native_encode_expr (part, ptr, len);
- if (rsize == 0)
+ rsize = native_encode_expr (part, ptr, len, off);
+ if (off == -1
+ && rsize == 0)
return 0;
part = TREE_IMAGPART (expr);
- isize = native_encode_expr (part, ptr+rsize, len-rsize);
- if (isize != rsize)
+ if (off != -1)
+ off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
+ isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
+ if (off == -1
+ && isize != rsize)
return 0;
return rsize + isize;
}
upon failure. */
static int
-native_encode_vector (const_tree expr, unsigned char *ptr, int len)
+native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
{
unsigned i, count;
int size, offset;
size = GET_MODE_SIZE (TYPE_MODE (itype));
for (i = 0; i < count; i++)
{
+ if (off >= size)
+ {
+ off -= size;
+ continue;
+ }
elem = VECTOR_CST_ELT (expr, i);
- if (native_encode_expr (elem, ptr+offset, len-offset) != size)
+ int res = native_encode_expr (elem, ptr+offset, len-offset, off);
+ if ((off == -1 && res != size)
+ || res == 0)
return 0;
- offset += size;
+ offset += res;
+ if (offset >= len)
+ return offset;
+ if (off != -1)
+ off = 0;
}
return offset;
}
upon failure. */
static int
-native_encode_string (const_tree expr, unsigned char *ptr, int len)
+native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
{
tree type = TREE_TYPE (expr);
HOST_WIDE_INT total_bytes;
if (TREE_CODE (type) != ARRAY_TYPE
|| TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
|| GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
- || !host_integerp (TYPE_SIZE_UNIT (type), 0))
+ || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
return 0;
- total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
- if (total_bytes > len)
+ total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
+ if ((off == -1 && total_bytes > len)
+ || off >= total_bytes)
return 0;
- if (TREE_STRING_LENGTH (expr) < total_bytes)
+ if (off == -1)
+ off = 0;
+ if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
{
- memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
- memset (ptr + TREE_STRING_LENGTH (expr), 0,
- total_bytes - TREE_STRING_LENGTH (expr));
+ int written = 0;
+ if (off < TREE_STRING_LENGTH (expr))
+ {
+ written = MIN (len, TREE_STRING_LENGTH (expr) - off);
+ memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
+ }
+ memset (ptr + written, 0,
+ MIN (total_bytes - written, len - written));
}
else
- memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
- return total_bytes;
+ memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
+ return MIN (total_bytes - off, len);
}
/* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
- buffer PTR of length LEN bytes. Return the number of bytes
- placed in the buffer, or zero upon failure. */
+ buffer PTR of length LEN bytes. If OFF is not -1 then start
+ the encoding at byte offset OFF and encode at most LEN bytes.
+ Return the number of bytes placed in the buffer, or zero upon failure. */
int
-native_encode_expr (const_tree expr, unsigned char *ptr, int len)
+native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
{
switch (TREE_CODE (expr))
{
case INTEGER_CST:
- return native_encode_int (expr, ptr, len);
+ return native_encode_int (expr, ptr, len, off);
case REAL_CST:
- return native_encode_real (expr, ptr, len);
+ return native_encode_real (expr, ptr, len, off);
case FIXED_CST:
- return native_encode_fixed (expr, ptr, len);
+ return native_encode_fixed (expr, ptr, len, off);
case COMPLEX_CST:
- return native_encode_complex (expr, ptr, len);
+ return native_encode_complex (expr, ptr, len, off);
case VECTOR_CST:
- return native_encode_vector (expr, ptr, len);
+ return native_encode_vector (expr, ptr, len, off);
case STRING_CST:
- return native_encode_string (expr, ptr, len);
+ return native_encode_string (expr, ptr, len, off);
default:
return 0;
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
- double_int result;
if (total_bytes > len
|| total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
return NULL_TREE;
- result = double_int::from_buffer (ptr, total_bytes);
+ wide_int result = wi::from_buffer (ptr, total_bytes);
- return double_int_to_tree (type, result);
+ return wide_int_to_tree (type, result);
}
static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
int total_bytes = GET_MODE_SIZE (mode);
int byte, offset, word, words, bitpos;
unsigned char value;
gcc_assert (IS_EXPR_CODE_CLASS (kind)
&& TREE_CODE_LENGTH (code) == 1);
+ tem = generic_simplify (loc, code, type, op0);
+ if (tem)
+ return tem;
+
arg0 = op0;
if (arg0)
{
switch (code)
{
- case PAREN_EXPR:
- /* Re-association barriers around constants and other re-association
- barriers can be removed. */
- if (CONSTANT_CLASS_P (op0)
- || TREE_CODE (op0) == PAREN_EXPR)
+ case NON_LVALUE_EXPR:
+ if (!maybe_lvalue_p (op0))
return fold_convert_loc (loc, type, op0);
return NULL_TREE;
CASE_CONVERT:
case FLOAT_EXPR:
case FIX_TRUNC_EXPR:
- if (TREE_TYPE (op0) == type)
- return op0;
-
if (COMPARISON_CLASS_P (op0))
{
/* If we have (type) (a CMP b) and type is an integral type, return
constant_boolean_node (false, type));
}
- /* Handle cases of two conversions in a row. */
- if (CONVERT_EXPR_P (op0))
- {
- tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
- tree inter_type = TREE_TYPE (op0);
- int inside_int = INTEGRAL_TYPE_P (inside_type);
- int inside_ptr = POINTER_TYPE_P (inside_type);
- int inside_float = FLOAT_TYPE_P (inside_type);
- int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
- unsigned int inside_prec = TYPE_PRECISION (inside_type);
- int inside_unsignedp = TYPE_UNSIGNED (inside_type);
- int inter_int = INTEGRAL_TYPE_P (inter_type);
- int inter_ptr = POINTER_TYPE_P (inter_type);
- int inter_float = FLOAT_TYPE_P (inter_type);
- int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
- unsigned int inter_prec = TYPE_PRECISION (inter_type);
- int inter_unsignedp = TYPE_UNSIGNED (inter_type);
- int final_int = INTEGRAL_TYPE_P (type);
- int final_ptr = POINTER_TYPE_P (type);
- int final_float = FLOAT_TYPE_P (type);
- int final_vec = TREE_CODE (type) == VECTOR_TYPE;
- unsigned int final_prec = TYPE_PRECISION (type);
- int final_unsignedp = TYPE_UNSIGNED (type);
-
- /* In addition to the cases of two conversions in a row
- handled below, if we are converting something to its own
- type via an object of identical or wider precision, neither
- conversion is needed. */
- if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
- && (((inter_int || inter_ptr) && final_int)
- || (inter_float && final_float))
- && inter_prec >= final_prec)
- return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
-
- /* Likewise, if the intermediate and initial types are either both
- float or both integer, we don't need the middle conversion if the
- former is wider than the latter and doesn't change the signedness
- (for integers). Avoid this if the final type is a pointer since
- then we sometimes need the middle conversion. Likewise if the
- final type has a precision not equal to the size of its mode. */
- if (((inter_int && inside_int)
- || (inter_float && inside_float)
- || (inter_vec && inside_vec))
- && inter_prec >= inside_prec
- && (inter_float || inter_vec
- || inter_unsignedp == inside_unsignedp)
- && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
- && TYPE_MODE (type) == TYPE_MODE (inter_type))
- && ! final_ptr
- && (! final_vec || inter_prec == inside_prec))
- return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
-
- /* If we have a sign-extension of a zero-extended value, we can
- replace that by a single zero-extension. Likewise if the
- final conversion does not change precision we can drop the
- intermediate conversion. */
- if (inside_int && inter_int && final_int
- && ((inside_prec < inter_prec && inter_prec < final_prec
- && inside_unsignedp && !inter_unsignedp)
- || final_prec == inter_prec))
- return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
-
- /* Two conversions in a row are not needed unless:
- - some conversion is floating-point (overstrict for now), or
- - some conversion is a vector (overstrict for now), or
- - the intermediate type is narrower than both initial and
- final, or
- - the intermediate type and innermost type differ in signedness,
- and the outermost type is wider than the intermediate, or
- - the initial type is a pointer type and the precisions of the
- intermediate and final types differ, or
- - the final type is a pointer type and the precisions of the
- initial and intermediate types differ. */
- if (! inside_float && ! inter_float && ! final_float
- && ! inside_vec && ! inter_vec && ! final_vec
- && (inter_prec >= inside_prec || inter_prec >= final_prec)
- && ! (inside_int && inter_int
- && inter_unsignedp != inside_unsignedp
- && inter_prec < final_prec)
- && ((inter_unsignedp && inter_prec > inside_prec)
- == (final_unsignedp && final_prec > inter_prec))
- && ! (inside_ptr && inter_prec != final_prec)
- && ! (final_ptr && inside_prec != inter_prec)
- && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
- && TYPE_MODE (type) == TYPE_MODE (inter_type)))
- return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
- }
-
/* Handle (T *)&A.B.C for A being of type T and B and C
living at offset zero. This occurs frequently in
C++ upcasting and then accessing the base. */
{
HOST_WIDE_INT bitsize, bitpos;
tree offset;
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp, volatilep;
tree base = TREE_OPERAND (op0, 0);
base = get_inner_reference (base, &bitsize, &bitpos, &offset,
change = 1;
else if (TYPE_PRECISION (TREE_TYPE (and1))
<= HOST_BITS_PER_WIDE_INT
- && host_integerp (and1, 1))
+ && tree_fits_uhwi_p (and1))
{
unsigned HOST_WIDE_INT cst;
- cst = tree_low_cst (and1, 1);
+ cst = tree_to_uhwi (and1);
cst &= HOST_WIDE_INT_M1U
<< (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
change = (cst == 0);
}
if (change)
{
- tem = force_fit_type_double (type, tree_to_double_int (and1),
- 0, TREE_OVERFLOW (and1));
+ tem = force_fit_type (type, wi::to_widest (and1), 0,
+ TREE_OVERFLOW (and1));
return fold_build2_loc (loc, BIT_AND_EXPR, type,
- fold_convert_loc (loc, type, and0), tem);
+ fold_convert_loc (loc, type, and0), tem);
}
}
}
}
- tem = fold_convert_const (code, type, op0);
+ tem = fold_convert_const (code, type, arg0);
return tem ? tem : NULL_TREE;
case ADDR_SPACE_CONVERT_EXPR:
return tem ? tem : NULL_TREE;
case VIEW_CONVERT_EXPR:
- if (TREE_TYPE (op0) == type)
- return op0;
- if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
- return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
- type, TREE_OPERAND (op0, 0));
if (TREE_CODE (op0) == MEM_REF)
return fold_build2_loc (loc, MEM_REF, type,
TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
- /* For integral conversions with the same precision or pointer
- conversions use a NOP_EXPR instead. */
- if ((INTEGRAL_TYPE_P (type)
- || POINTER_TYPE_P (type))
- && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
- || POINTER_TYPE_P (TREE_TYPE (op0)))
- && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
- return fold_convert_loc (loc, type, op0);
-
- /* Strip inner integral conversions that do not change the precision. */
- if (CONVERT_EXPR_P (op0)
- && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
- || POINTER_TYPE_P (TREE_TYPE (op0)))
- && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
- || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
- && (TYPE_PRECISION (TREE_TYPE (op0))
- == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
- return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
- type, TREE_OPERAND (op0, 0));
-
return fold_view_convert_expr (type, op0);
case NEGATE_EXPR:
case ABS_EXPR:
if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
return fold_abs_const (arg0, type);
- else if (TREE_CODE (arg0) == NEGATE_EXPR)
- return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
/* Convert fabs((double)float) into (double)fabsf(float). */
else if (TREE_CODE (arg0) == NOP_EXPR
&& TREE_CODE (type) == REAL_TYPE)
/* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
else if (TREE_CODE (arg0) == ABS_EXPR)
return arg0;
- else if (tree_expr_nonnegative_p (arg0))
- return arg0;
/* Strip sign ops from argument. */
if (TREE_CODE (type) == REAL_TYPE)
case BIT_NOT_EXPR:
if (TREE_CODE (arg0) == INTEGER_CST)
return fold_not_const (arg0, type);
- else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
- return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
/* Convert ~ (-A) to A - 1. */
else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
return fold_build2_loc (loc, MINUS_EXPR, type,
if (i == count)
return build_vector (type, elements);
}
- else if (COMPARISON_CLASS_P (arg0)
- && (VECTOR_TYPE_P (type)
- || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
- {
- tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
- enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
- HONOR_NANS (TYPE_MODE (op_type)));
- if (subcode != ERROR_MARK)
- return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg0, 1));
- }
-
return NULL_TREE;
case REALPART_EXPR:
if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
return fold_convert_loc (loc, type, arg0);
- if (TREE_CODE (arg0) == COMPLEX_EXPR)
- return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg0, 1));
if (TREE_CODE (arg0) == COMPLEX_CST)
return fold_convert_loc (loc, type, TREE_REALPART (arg0));
if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
case IMAGPART_EXPR:
if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
return build_zero_cst (type);
- if (TREE_CODE (arg0) == COMPLEX_EXPR)
- return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg0, 0));
if (TREE_CODE (arg0) == COMPLEX_CST)
return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
case REDUC_MAX_EXPR:
case REDUC_PLUS_EXPR:
{
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
+ unsigned int nelts, i;
tree *elts;
enum tree_code subcode;
if (TREE_CODE (op0) != VECTOR_CST)
return NULL_TREE;
+ nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (op0));
elts = XALLOCAVEC (tree, nelts);
if (!vec_cst_ctor_to_array (op0, elts))
elts[0] = const_binop (subcode, elts[0], elts[i]);
if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
return NULL_TREE;
- elts[i] = build_zero_cst (TREE_TYPE (type));
}
- return build_vector (type, elts);
+ return elts[0];
}
default:
static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
- double_int di_offset, total;
-
if (!POINTER_TYPE_P (TREE_TYPE (base)))
return true;
if (bitpos < 0)
return true;
+ wide_int wi_offset;
+ int precision = TYPE_PRECISION (TREE_TYPE (base));
if (offset == NULL_TREE)
- di_offset = double_int_zero;
+ wi_offset = wi::zero (precision);
else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
return true;
else
- di_offset = TREE_INT_CST (offset);
+ wi_offset = offset;
bool overflow;
- double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
- total = di_offset.add_with_sign (units, true, &overflow);
+ wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
+ wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
if (overflow)
return true;
- if (total.high != 0)
+ if (!wi::fits_uhwi_p (total))
return true;
HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
size = base_size;
}
- return total.low > (unsigned HOST_WIDE_INT) size;
+ return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
+}
+
+/* Return the HOST_WIDE_INT least significant bits of T, a sizetype
+ kind INTEGER_CST. This makes sure to properly sign-extend the
+ constant. */
+
+static HOST_WIDE_INT
+size_low_cst (const_tree t)
+{
+ HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
+ int prec = TYPE_PRECISION (TREE_TYPE (t));
+ if (prec < HOST_BITS_PER_WIDE_INT)
+ /* Sign-extend from the type's precision so constants narrower
+ than a HOST_WIDE_INT keep their sign in the returned value. */
+ return sext_hwi (w, prec);
+ return w;
+}
/* Subroutine of fold_binary. This routine performs all of the
fold_comparison (location_t loc, enum tree_code code, tree type,
tree op0, tree op1)
{
+ const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
tree arg0, arg1, tem;
arg0 = op0;
STRIP_SIGN_NOPS (arg0);
STRIP_SIGN_NOPS (arg1);
- tem = fold_relational_const (code, type, arg0, arg1);
- if (tem != NULL_TREE)
- return tem;
-
- /* If one arg is a real or integer constant, put it last. */
- if (tree_swap_operands_p (arg0, arg1, true))
- return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
-
- /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
+ /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
- && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
- && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
- && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
- && (TREE_CODE (arg1) == INTEGER_CST
- && !TREE_OVERFLOW (arg1)))
+ && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+ && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
+ && TREE_CODE (arg1) == INTEGER_CST
+ && !TREE_OVERFLOW (arg1))
{
+ const enum tree_code
+ reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
tree const1 = TREE_OPERAND (arg0, 1);
- tree const2 = arg1;
+ tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
tree variable = TREE_OPERAND (arg0, 0);
- tree lhs;
- int lhs_add;
- lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
-
- lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
- TREE_TYPE (arg1), const2, const1);
+ tree new_const = int_const_binop (reverse_op, const2, const1);
/* If the constant operation overflowed this can be
simplified as a comparison against INT_MAX/INT_MIN. */
- if (TREE_CODE (lhs) == INTEGER_CST
- && TREE_OVERFLOW (lhs))
+ if (TREE_OVERFLOW (new_const)
+ && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
{
int const1_sgn = tree_int_cst_sgn (const1);
enum tree_code code2 = code;
/* We now can look at the canonicalized case
VARIABLE + 1 CODE2 INT_MIN
and decide on the result. */
- if (code2 == LT_EXPR
- || code2 == LE_EXPR
- || code2 == EQ_EXPR)
- return omit_one_operand_loc (loc, type, boolean_false_node, variable);
- else if (code2 == NE_EXPR
- || code2 == GE_EXPR
- || code2 == GT_EXPR)
- return omit_one_operand_loc (loc, type, boolean_true_node, variable);
- }
+ switch (code2)
+ {
+ case EQ_EXPR:
+ case LT_EXPR:
+ case LE_EXPR:
+ return
+ omit_one_operand_loc (loc, type, boolean_false_node, variable);
- if (TREE_CODE (lhs) == TREE_CODE (arg1)
- && (TREE_CODE (lhs) != INTEGER_CST
- || !TREE_OVERFLOW (lhs)))
+ case NE_EXPR:
+ case GE_EXPR:
+ case GT_EXPR:
+ return
+ omit_one_operand_loc (loc, type, boolean_true_node, variable);
+
+ default:
+ gcc_unreachable ();
+ }
+ }
+ else
{
- if (code != EQ_EXPR && code != NE_EXPR)
+ if (!equality_code)
fold_overflow_warning ("assuming signed overflow does not occur "
"when changing X +- C1 cmp C2 to "
- "X cmp C1 +- C2",
+ "X cmp C2 -+ C1",
WARN_STRICT_OVERFLOW_COMPARISON);
- return fold_build2_loc (loc, code, type, variable, lhs);
+ return fold_build2_loc (loc, code, type, variable, new_const);
}
}
+ /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
+ if (TREE_CODE (arg0) == MINUS_EXPR
+ && equality_code
+ && integer_zerop (arg1))
+ {
+ /* ??? The transformation is valid for the other operators if overflow
+ is undefined for the type, but performing it here badly interacts
+ with the transformation in fold_cond_expr_with_comparison which
+ attempts to synthesize ABS_EXPR. */
+ if (!equality_code)
+ fold_overflow_warning ("assuming signed overflow does not occur "
+ "when changing X - Y cmp 0 to X cmp Y",
+ WARN_STRICT_OVERFLOW_COMPARISON);
+ return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg0, 1));
+ }
+
/* For comparisons of pointers we can decompose it to a compile time
comparison of the base objects and the offsets into the object.
This requires at least one operand being an ADDR_EXPR or a
{
tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
- enum machine_mode mode;
+ machine_mode mode;
int volatilep, unsignedp;
bool indirect_base0 = false, indirect_base1 = false;
indirect_base0 = true;
}
offset0 = TREE_OPERAND (arg0, 1);
- if (host_integerp (offset0, 0))
+ if (tree_fits_shwi_p (offset0))
{
HOST_WIDE_INT off = size_low_cst (offset0);
if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
indirect_base1 = true;
}
offset1 = TREE_OPERAND (arg1, 1);
- if (host_integerp (offset1, 0))
+ if (tree_fits_shwi_p (offset1))
{
HOST_WIDE_INT off = size_low_cst (offset1);
if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
|| POINTER_TYPE_OVERFLOW_UNDEFINED))
{
- if (code != EQ_EXPR
- && code != NE_EXPR
+ if (!equality_code
&& bitpos0 != bitpos1
&& (pointer_may_wrap_p (base0, offset0, bitpos0)
|| pointer_may_wrap_p (base1, offset1, bitpos1)))
object and overflow on pointer differences is undefined as of
6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
else if (bitpos0 == bitpos1
- && ((code == EQ_EXPR || code == NE_EXPR)
+ && (equality_code
|| (indirect_base0 && DECL_P (base0))
|| POINTER_TYPE_OVERFLOW_UNDEFINED))
{
else
offset1 = fold_convert_loc (loc, ssizetype, offset1);
- if (code != EQ_EXPR
- && code != NE_EXPR
+ if (!equality_code
&& (pointer_may_wrap_p (base0, offset0, bitpos0)
|| pointer_may_wrap_p (base1, offset1, bitpos1)))
fold_overflow_warning (("assuming pointer wraparound does not "
/* Transform comparisons of the form X +- C1 CMP Y +- C2 to
X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
the resulting offset is smaller in absolute value than the
- original one. */
+ original one and has the same sign. */
if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
&& (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
&& (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
"a comparison");
/* Put the constant on the side where it doesn't overflow and is
- of lower absolute value than before. */
+ of lower absolute value and of the same sign as before. */
cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
? MINUS_EXPR : PLUS_EXPR,
const2, const1);
if (!TREE_OVERFLOW (cst)
- && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
+ && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
+ && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
{
fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
return fold_build2_loc (loc, code, type,
- variable1,
- fold_build2_loc (loc,
- TREE_CODE (arg1), TREE_TYPE (arg1),
- variable2, cst));
+ variable1,
+ fold_build2_loc (loc, TREE_CODE (arg1),
+ TREE_TYPE (arg1),
+ variable2, cst));
}
cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
? MINUS_EXPR : PLUS_EXPR,
const1, const2);
if (!TREE_OVERFLOW (cst)
- && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
+ && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
+ && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
{
fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
return fold_build2_loc (loc, code, type,
- fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
- variable1, cst),
- variable2);
+ fold_build2_loc (loc, TREE_CODE (arg0),
+ TREE_TYPE (arg0),
+ variable1, cst),
+ variable2);
}
}
{
REAL_VALUE_TYPE r;
tree unit_type, *elts;
- enum machine_mode mode;
+ machine_mode mode;
unsigned vec_nelts, i;
switch (TREE_CODE (cst))
/* Mask out the tz least significant bits of X of type TYPE where
tz is the number of trailing zeroes in Y. */
-static double_int
-mask_with_tz (tree type, double_int x, double_int y)
+static wide_int
+mask_with_tz (tree type, const wide_int &x, const wide_int &y)
{
- int tz = y.trailing_zeros ();
-
+ int tz = wi::ctz (y);
if (tz > 0)
+ return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
+ return x;
+}
+
+/* Return true when T is an address and is known to be nonzero.
+ For floating point we further ensure that T is not denormal.
+ Similar logic is present in nonzero_address in rtlanal.h.
+
+ If the return value is based on the assumption that signed overflow
+ is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
+ change *STRICT_OVERFLOW_P. */
+
+static bool
+tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
+{
+ tree type = TREE_TYPE (t);
+ enum tree_code code;
+
+ /* Doing something useful for floating point would need more work. */
+ if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
+ return false;
+
+ code = TREE_CODE (t);
+ switch (TREE_CODE_CLASS (code))
{
- double_int mask;
+ case tcc_unary:
+ return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
+ strict_overflow_p);
+ case tcc_binary:
+ case tcc_comparison:
+ return tree_binary_nonzero_warnv_p (code, type,
+ TREE_OPERAND (t, 0),
+ TREE_OPERAND (t, 1),
+ strict_overflow_p);
+ case tcc_constant:
+ case tcc_declaration:
+ case tcc_reference:
+ return tree_single_nonzero_warnv_p (t, strict_overflow_p);
- mask = ~double_int::mask (tz);
- mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
- return mask & x;
+ default:
+ break;
}
- return x;
+
+ switch (code)
+ {
+ case TRUTH_NOT_EXPR:
+ return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
+ strict_overflow_p);
+
+ case TRUTH_AND_EXPR:
+ case TRUTH_OR_EXPR:
+ case TRUTH_XOR_EXPR:
+ return tree_binary_nonzero_warnv_p (code, type,
+ TREE_OPERAND (t, 0),
+ TREE_OPERAND (t, 1),
+ strict_overflow_p);
+
+ case COND_EXPR:
+ case CONSTRUCTOR:
+ case OBJ_TYPE_REF:
+ case ASSERT_EXPR:
+ case ADDR_EXPR:
+ case WITH_SIZE_EXPR:
+ case SSA_NAME:
+ return tree_single_nonzero_warnv_p (t, strict_overflow_p);
+
+ case COMPOUND_EXPR:
+ case MODIFY_EXPR:
+ case BIND_EXPR:
+ return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
+ strict_overflow_p);
+
+ case SAVE_EXPR:
+ return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
+ strict_overflow_p);
+
+ case CALL_EXPR:
+ {
+ tree fndecl = get_callee_fndecl (t);
+ if (!fndecl) return false;
+ if (flag_delete_null_pointer_checks && !flag_check_new
+ && DECL_IS_OPERATOR_NEW (fndecl)
+ && !TREE_NOTHROW (fndecl))
+ return true;
+ if (flag_delete_null_pointer_checks
+ && lookup_attribute ("returns_nonnull",
+ TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
+ return true;
+ return alloca_call_p (t);
+ }
+
+ default:
+ break;
+ }
+ return false;
+}
+
+/* Return true when T is an address and is known to be nonzero.
+ Handle warnings about undefined signed overflow. */
+
+static bool
+tree_expr_nonzero_p (tree t)
+{
+ bool ret, strict_overflow_p;
+
+ strict_overflow_p = false;
+ ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
+ if (strict_overflow_p)
+ fold_overflow_warning (("assuming signed overflow does not occur when "
+ "determining that expression is always "
+ "non-zero"),
+ WARN_STRICT_OVERFLOW_MISC);
+ return ret;
}
/* Fold a binary expression of code CODE and type TYPE with operands
&& tree_swap_operands_p (arg0, arg1, true))
return fold_build2_loc (loc, code, type, op1, op0);
+ /* Likewise if this is a comparison, and ARG0 is a constant, move it
+ to ARG1 to reduce the number of tests below. */
+ if (kind == tcc_comparison
+ && tree_swap_operands_p (arg0, arg1, true))
+ return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
+
+ tem = generic_simplify (loc, code, type, op0, op1);
+ if (tem)
+ return tem;
+
/* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
First check for cases where an arithmetic operation is applied to a
return NULL_TREE;
case POINTER_PLUS_EXPR:
- /* 0 +p index -> (type)index */
- if (integer_zerop (arg0))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
-
- /* PTR +p 0 -> PTR */
- if (integer_zerop (arg1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
-
/* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
&& INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
fold_convert_loc (loc, sizetype,
arg0)));
- /* (PTR +p B) +p A -> PTR +p (B + A) */
- if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
- {
- tree inner;
- tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
- tree arg00 = TREE_OPERAND (arg0, 0);
- inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
- arg01, fold_convert_loc (loc, sizetype, arg1));
- return fold_convert_loc (loc, type,
- fold_build_pointer_plus_loc (loc,
- arg00, inner));
- }
-
/* PTR_CST +p CST -> CST1 */
if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
fold_convert_loc (loc, type, arg1));
- /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
- of the array. Loop optimizer sometimes produce this type of
- expressions. */
- if (TREE_CODE (arg0) == ADDR_EXPR)
- {
- tem = try_move_mult_to_index (loc, arg0,
- fold_convert_loc (loc,
- ssizetype, arg1));
- if (tem)
- return fold_convert_loc (loc, type, tem);
- }
-
return NULL_TREE;
case PLUS_EXPR:
- /* A + (-B) -> A - B */
- if (TREE_CODE (arg1) == NEGATE_EXPR)
- return fold_build2_loc (loc, MINUS_EXPR, type,
- fold_convert_loc (loc, type, arg0),
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg1, 0)));
- /* (-A) + B -> B - A */
- if (TREE_CODE (arg0) == NEGATE_EXPR
- && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
- return fold_build2_loc (loc, MINUS_EXPR, type,
- fold_convert_loc (loc, type, arg1),
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg0, 0)));
-
if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
{
- /* Convert ~A + 1 to -A. */
- if (TREE_CODE (arg0) == BIT_NOT_EXPR
- && integer_onep (arg1))
- return fold_build1_loc (loc, NEGATE_EXPR, type,
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg0, 0)));
-
- /* ~X + X is -1. */
- if (TREE_CODE (arg0) == BIT_NOT_EXPR
- && !TYPE_OVERFLOW_TRAPS (type))
- {
- tree tem = TREE_OPERAND (arg0, 0);
-
- STRIP_NOPS (tem);
- if (operand_equal_p (tem, arg1, 0))
- {
- t1 = build_all_ones_cst (type);
- return omit_one_operand_loc (loc, type, t1, arg1);
- }
- }
-
- /* X + ~X is -1. */
- if (TREE_CODE (arg1) == BIT_NOT_EXPR
- && !TYPE_OVERFLOW_TRAPS (type))
- {
- tree tem = TREE_OPERAND (arg1, 0);
-
- STRIP_NOPS (tem);
- if (operand_equal_p (arg0, tem, 0))
- {
- t1 = build_all_ones_cst (type);
- return omit_one_operand_loc (loc, type, t1, arg0);
- }
- }
-
/* X + (X / CST) * -CST is X % CST. */
if (TREE_CODE (arg1) == MULT_EXPR
&& TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
if (! FLOAT_TYPE_P (type))
{
- if (integer_zerop (arg1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
-
/* If we are adding two BIT_AND_EXPR's, both of which are and'ing
with a constant, and the two constants have no bits in common,
we should treat this as a BIT_IOR_EXPR since this may produce more
&& TREE_CODE (arg1) == BIT_AND_EXPR
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
- && integer_zerop (const_binop (BIT_AND_EXPR,
- TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1))))
+ && wi::bit_and (TREE_OPERAND (arg0, 1),
+ TREE_OPERAND (arg1, 1)) == 0)
{
code = BIT_IOR_EXPR;
goto bit_ior;
code11 = TREE_CODE (tree11);
if (code01 == INTEGER_CST
&& code11 == INTEGER_CST
- && TREE_INT_CST_HIGH (tree01) == 0
- && TREE_INT_CST_HIGH (tree11) == 0
- && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
+ && (wi::to_widest (tree01) + wi::to_widest (tree11)
== element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
{
tem = build2_loc (loc, LROTATE_EXPR,
if (tmp)
return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
}
+ /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
+ simplifies. */
+ else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
+ {
+ tree arg10 = fold_convert_loc (loc, type,
+ TREE_OPERAND (arg1, 0));
+ tree arg11 = fold_convert_loc (loc, type,
+ TREE_OPERAND (arg1, 1));
+ tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
+ fold_convert_loc (loc, type, arg0),
+ arg10);
+ if (tmp)
+ return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
+ }
}
- /* A - (-B) -> A + B */
- if (TREE_CODE (arg1) == NEGATE_EXPR)
- return fold_build2_loc (loc, PLUS_EXPR, type, op0,
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg1, 0)));
/* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
if (TREE_CODE (arg0) == NEGATE_EXPR
&& negate_expr_p (arg1)
fold_convert_loc (loc, type,
TREE_OPERAND (arg0, 0)));
/* Convert -A - 1 to ~A. */
- if (TREE_CODE (type) != COMPLEX_TYPE
- && TREE_CODE (arg0) == NEGATE_EXPR
- && integer_onep (arg1)
+ if (TREE_CODE (arg0) == NEGATE_EXPR
+ && integer_each_onep (arg1)
&& !TYPE_OVERFLOW_TRAPS (type))
return fold_build1_loc (loc, BIT_NOT_EXPR, type,
fold_convert_loc (loc, type,
{
if (integer_zerop (arg0))
return negate_expr (fold_convert_loc (loc, type, arg1));
- if (integer_zerop (arg1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* Fold A - (A & B) into ~B & A. */
if (!TREE_SIDE_EFFECTS (arg0)
}
}
- /* Fold &x - &x. This can happen from &x.foo - &x.
- This is unsafe for certain floats even in non-IEEE formats.
- In IEEE, it is unsafe because it does wrong for NaNs.
- Also note that operand_equal_p is always false if an operand
- is volatile. */
-
- if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
- && operand_equal_p (arg0, arg1, 0))
- return build_zero_cst (type);
-
/* A - B -> A + (-B) if B is easily negatable. */
if (negate_expr_p (arg1)
+ && !TYPE_OVERFLOW_SANITIZED (type)
&& ((FLOAT_TYPE_P (type)
/* Avoid this transformation if B is a positive REAL_CST. */
&& (TREE_CODE (arg1) != REAL_CST
if (! FLOAT_TYPE_P (type))
{
- if (integer_zerop (arg1))
- return omit_one_operand_loc (loc, type, arg1, arg0);
- if (integer_onep (arg1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
/* Transform x * -1 into -x. Make sure to do the negation
on the original operand with conversions not stripped
because we can only strip non-sign-changing conversions. */
case BIT_IOR_EXPR:
bit_ior:
- if (integer_all_onesp (arg1))
- return omit_one_operand_loc (loc, type, arg1, arg0);
- if (integer_zerop (arg1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
- if (operand_equal_p (arg0, arg1, 0))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
-
/* ~X | X is -1. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
&& TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
- double_int c1, c2, c3, msk;
int width = TYPE_PRECISION (type), w;
- bool try_simplify = true;
-
- c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
- c2 = tree_to_double_int (arg1);
+ wide_int c1 = TREE_OPERAND (arg0, 1);
+ wide_int c2 = arg1;
/* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
if ((c1 & c2) == c1)
return omit_one_operand_loc (loc, type, arg1,
TREE_OPERAND (arg0, 0));
- msk = double_int::mask (width);
+ wide_int msk = wi::mask (width, false,
+ TYPE_PRECISION (TREE_TYPE (arg1)));
/* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
- if (msk.and_not (c1 | c2).is_zero ())
+ if (msk.and_not (c1 | c2) == 0)
return fold_build2_loc (loc, BIT_IOR_EXPR, type,
TREE_OPERAND (arg0, 0), arg1);
mode which allows further optimizations. */
c1 &= msk;
c2 &= msk;
- c3 = c1.and_not (c2);
- for (w = BITS_PER_UNIT;
- w <= width && w <= HOST_BITS_PER_WIDE_INT;
- w <<= 1)
+ wide_int c3 = c1.and_not (c2);
+ for (w = BITS_PER_UNIT; w <= width; w <<= 1)
{
- unsigned HOST_WIDE_INT mask
- = HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
- if (((c1.low | c2.low) & mask) == mask
- && (c1.low & ~mask) == 0 && c1.high == 0)
+ wide_int mask = wi::mask (w, false,
+ TYPE_PRECISION (type));
+ if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
{
- c3 = double_int::from_uhwi (mask);
+ c3 = mask;
break;
}
}
- /* If X is a tree of the form (Y * K1) & K2, this might conflict
- with that optimization from the BIT_AND_EXPR optimizations.
- This could end up in an infinite recursion. */
- if (TREE_CODE (TREE_OPERAND (arg0, 0)) == MULT_EXPR
- && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
- == INTEGER_CST)
- {
- tree t = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
- double_int masked = mask_with_tz (type, c3, tree_to_double_int (t));
-
- try_simplify = (masked != c1);
- }
-
- if (try_simplify && c3 != c1)
+ if (c3 != c1)
return fold_build2_loc (loc, BIT_IOR_EXPR, type,
fold_build2_loc (loc, BIT_AND_EXPR, type,
TREE_OPERAND (arg0, 0),
- double_int_to_tree (type,
- c3)),
+ wide_int_to_tree (type,
+ c3)),
arg1);
}
- /* (X & Y) | Y is (X, Y). */
- if (TREE_CODE (arg0) == BIT_AND_EXPR
- && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
- return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
- /* (X & Y) | X is (Y, X). */
- if (TREE_CODE (arg0) == BIT_AND_EXPR
- && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
- && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
- return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
- /* X | (X & Y) is (Y, X). */
- if (TREE_CODE (arg1) == BIT_AND_EXPR
- && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
- && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
- return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
- /* X | (Y & X) is (Y, X). */
- if (TREE_CODE (arg1) == BIT_AND_EXPR
- && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
- && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
- return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
-
/* (X & ~Y) | (~X & Y) is X ^ Y */
if (TREE_CODE (arg0) == BIT_AND_EXPR
&& TREE_CODE (arg1) == BIT_AND_EXPR)
goto bit_rotate;
case BIT_XOR_EXPR:
- if (integer_zerop (arg1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
- if (integer_all_onesp (arg1))
- return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
- if (operand_equal_p (arg0, arg1, 0))
- return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
-
/* ~X ^ X is -1. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
&& TREE_CODE (arg1) == BIT_AND_EXPR
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
&& TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
- && integer_zerop (const_binop (BIT_AND_EXPR,
- TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1))))
+ && wi::bit_and (TREE_OPERAND (arg0, 1),
+ TREE_OPERAND (arg1, 1)) == 0)
{
code = BIT_IOR_EXPR;
goto bit_ior;
/* Fold (X & 1) ^ 1 as (X & 1) == 0. */
if (TREE_CODE (arg0) == BIT_AND_EXPR
+ && INTEGRAL_TYPE_P (type)
&& integer_onep (TREE_OPERAND (arg0, 1))
&& integer_onep (arg1))
return fold_build2_loc (loc, EQ_EXPR, type, arg0,
goto bit_rotate;
case BIT_AND_EXPR:
- if (integer_all_onesp (arg1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
- if (integer_zerop (arg1))
- return omit_one_operand_loc (loc, type, arg1, arg0);
- if (operand_equal_p (arg0, arg1, 0))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
-
/* ~X & X, (X == 0) & X, and !X & X are always zero. */
if ((TREE_CODE (arg0) == BIT_NOT_EXPR
|| TREE_CODE (arg0) == TRUTH_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
- /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
- if (TREE_CODE (arg0) == BIT_IOR_EXPR
- && TREE_CODE (arg1) == INTEGER_CST
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
- {
- tree tmp1 = fold_convert_loc (loc, type, arg1);
- tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
- tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
- tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
- tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
- return
- fold_convert_loc (loc, type,
- fold_build2_loc (loc, BIT_IOR_EXPR,
- type, tmp2, tmp3));
- }
-
- /* (X | Y) & Y is (X, Y). */
- if (TREE_CODE (arg0) == BIT_IOR_EXPR
- && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
- return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
- /* (X | Y) & X is (Y, X). */
- if (TREE_CODE (arg0) == BIT_IOR_EXPR
- && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
- && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
- return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
- /* X & (X | Y) is (Y, X). */
- if (TREE_CODE (arg1) == BIT_IOR_EXPR
- && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
- && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
- return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
- /* X & (Y | X) is (Y, X). */
- if (TREE_CODE (arg1) == BIT_IOR_EXPR
- && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
- && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
- return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
-
/* Fold (X ^ 1) & 1 as (X & 1) == 0. */
if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && INTEGRAL_TYPE_P (type)
&& integer_onep (TREE_OPERAND (arg0, 1))
&& integer_onep (arg1))
{
}
/* Fold ~X & 1 as (X & 1) == 0. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
+ && INTEGRAL_TYPE_P (type)
&& integer_onep (arg1))
{
tree tem2;
multiple of 1 << CST. */
if (TREE_CODE (arg1) == INTEGER_CST)
{
- double_int cst1 = tree_to_double_int (arg1);
- double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
- TYPE_UNSIGNED (TREE_TYPE (arg1)));
+ wide_int cst1 = arg1;
+ wide_int ncst1 = -cst1;
if ((cst1 & ncst1) == ncst1
&& multiple_of_p (type, arg0,
- double_int_to_tree (TREE_TYPE (arg1), ncst1)))
+ wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
return fold_convert_loc (loc, type, arg0);
}
&& TREE_CODE (arg0) == MULT_EXPR
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
{
- double_int masked
- = mask_with_tz (type, tree_to_double_int (arg1),
- tree_to_double_int (TREE_OPERAND (arg0, 1)));
+ wide_int warg1 = arg1;
+ wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
- if (masked.is_zero ())
+ if (masked == 0)
return omit_two_operands_loc (loc, type, build_zero_cst (type),
arg0, arg1);
- else if (masked != tree_to_double_int (arg1))
- return fold_build2_loc (loc, code, type, op0,
- double_int_to_tree (type, masked));
+ else if (masked != warg1)
+ {
+ /* Avoid the transform if arg1 is a mask of some
+ mode which allows further optimizations. */
+ int pop = wi::popcount (warg1);
+ if (!(pop >= BITS_PER_UNIT
+ && exact_log2 (pop) != -1
+ && wi::mask (pop, false, warg1.get_precision ()) == warg1))
+ return fold_build2_loc (loc, code, type, op0,
+ wide_int_to_tree (type, masked));
+ }
}
/* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
and for - instead of + (or unary - instead of +)
and/or ^ instead of |.
If B is constant and (B & M) == 0, fold into A & M. */
- if (host_integerp (arg1, 1))
+ if (TREE_CODE (arg1) == INTEGER_CST)
{
- unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
- if (~cst1 && (cst1 & (cst1 + 1)) == 0
+ wide_int cst1 = arg1;
+ if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
&& INTEGRAL_TYPE_P (TREE_TYPE (arg0))
&& (TREE_CODE (arg0) == PLUS_EXPR
|| TREE_CODE (arg0) == MINUS_EXPR
{
tree pmop[2];
int which = 0;
- unsigned HOST_WIDE_INT cst0;
+ wide_int cst0;
/* Now we know that arg0 is (C + D) or (C - D) or
-C and arg1 (M) is == (1LL << cst) - 1.
which = 1;
}
- if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
- || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
- & cst1) != cst1)
+ if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
which = -1;
for (; which >= 0; which--)
if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
!= INTEGER_CST)
break;
- /* tree_low_cst not used, because we don't care about
- the upper bits. */
- cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
+ cst0 = TREE_OPERAND (pmop[which], 1);
cst0 &= cst1;
if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
{
omitted (assumed 0). */
if ((TREE_CODE (arg0) == PLUS_EXPR
|| (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
- && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
+ && (cst1 & pmop[which]) == 0)
pmop[which] = NULL;
break;
default:
{
prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
- if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
- && (~TREE_INT_CST_LOW (arg1)
- & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
+ wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
+ if (mask == -1)
return
fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
}
/* If arg0 is derived from the address of an object or function, we may
be able to fold this expression using the object or function's
alignment. */
- if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
+ if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
{
unsigned HOST_WIDE_INT modulus, residue;
- unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
+ unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
modulus = get_pointer_modulus_and_residue (arg0, &residue,
integer_onep (arg1));
if the new mask might be further optimized. */
if ((TREE_CODE (arg0) == LSHIFT_EXPR
|| TREE_CODE (arg0) == RSHIFT_EXPR)
- && host_integerp (TREE_OPERAND (arg0, 1), 1)
- && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
- && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
- < TYPE_PRECISION (TREE_TYPE (arg0))
&& TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
- && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
+ && TREE_CODE (arg1) == INTEGER_CST
+ && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
+ && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
+ && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
+ < TYPE_PRECISION (TREE_TYPE (arg0))))
{
- unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
- unsigned HOST_WIDE_INT mask
- = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
+ unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
+ unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
unsigned HOST_WIDE_INT newmask, zerobits = 0;
tree shift_type = TREE_TYPE (arg0);
zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
else if (TREE_CODE (arg0) == RSHIFT_EXPR
&& TYPE_PRECISION (TREE_TYPE (arg0))
- == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
+ == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
{
prec = TYPE_PRECISION (TREE_TYPE (arg0));
tree arg00 = TREE_OPERAND (arg0, 0);
{
tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
if (TYPE_PRECISION (inner_type)
- == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
+ == GET_MODE_PRECISION (TYPE_MODE (inner_type))
&& TYPE_PRECISION (inner_type) < prec)
{
prec = TYPE_PRECISION (inner_type);
/* See if we can shorten the right shift. */
if (shiftc < prec)
shift_type = inner_type;
+ /* Otherwise X >> C1 is all zeros, so we'll optimize
+ it into (X, 0) later on by making sure zerobits
+ is all ones. */
}
}
zerobits = ~(unsigned HOST_WIDE_INT) 0;
- zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
- zerobits <<= prec - shiftc;
+ if (shiftc < prec)
+ {
+ zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
+ zerobits <<= prec - shiftc;
+ }
/* For arithmetic shift if sign bit could be set, zerobits
can contain actually sign bits, so no transformation is
possible, unless MASK masks them all away. In that
/* ((X << 16) & 0xff00) is (X, 0). */
if ((mask & zerobits) == mask)
return omit_one_operand_loc (loc, type,
- build_int_cst (type, 0), arg0);
+ build_int_cst (type, 0), arg0);
newmask = mask | zerobits;
if (newmask != mask && (newmask & (newmask + 1)) == 0)
tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
arg1, TREE_OPERAND (arg0, 1));
if (sum && integer_zerop (sum)) {
- unsigned long pow2;
-
- if (TREE_INT_CST_LOW (arg1))
- pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
- else
- pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
- + HOST_BITS_PER_WIDE_INT;
-
+ tree pow2 = build_int_cst (integer_type_node,
+ wi::exact_log2 (arg1));
return fold_build2_loc (loc, RSHIFT_EXPR, type,
- TREE_OPERAND (arg0, 0),
- build_int_cst (integer_type_node, pow2));
+ TREE_OPERAND (arg0, 0), pow2);
}
}
if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
{
tree sh_cnt = TREE_OPERAND (arg1, 1);
- unsigned long pow2;
-
- if (TREE_INT_CST_LOW (sval))
- pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
- else
- pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
- + HOST_BITS_PER_WIDE_INT;
+ tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
+ wi::exact_log2 (sval));
if (strict_overflow_p)
fold_overflow_warning (("assuming signed overflow does not "
WARN_STRICT_OVERFLOW_MISC);
sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
- sh_cnt,
- build_int_cst (TREE_TYPE (sh_cnt),
- pow2));
+ sh_cnt, pow2);
return fold_build2_loc (loc, RSHIFT_EXPR, type,
- fold_convert_loc (loc, type, arg0), sh_cnt);
+ fold_convert_loc (loc, type, arg0), sh_cnt);
}
}
case ROUND_DIV_EXPR:
case CEIL_DIV_EXPR:
case EXACT_DIV_EXPR:
- if (integer_onep (arg1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
if (integer_zerop (arg1))
return NULL_TREE;
/* X / -1 is -X. */
if (!TYPE_UNSIGNED (type)
&& TREE_CODE (arg1) == INTEGER_CST
- && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
- && TREE_INT_CST_HIGH (arg1) == -1)
+ && wi::eq_p (arg1, -1))
return fold_convert_loc (loc, type, negate_expr (arg0));
/* Convert -A / -B to A / B when the type is signed and overflow is
case FLOOR_MOD_EXPR:
case ROUND_MOD_EXPR:
case TRUNC_MOD_EXPR:
- /* X % 1 is always zero, but be sure to preserve any side
- effects in X. */
- if (integer_onep (arg1))
- return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
-
- /* X % 0, return X % 0 unchanged so that we can get the
- proper warnings and errors. */
- if (integer_zerop (arg1))
- return NULL_TREE;
-
- /* 0 % X is always zero, but be sure to preserve any side
- effects in X. Place this after checking for X == 0. */
- if (integer_zerop (arg0))
- return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
-
/* X % -1 is zero. */
if (!TYPE_UNSIGNED (type)
&& TREE_CODE (arg1) == INTEGER_CST
- && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
- && TREE_INT_CST_HIGH (arg1) == -1)
+ && wi::eq_p (arg1, -1))
return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
/* X % -C is the same as X % C. */
if (code == TRUNC_MOD_EXPR
- && !TYPE_UNSIGNED (type)
+ && TYPE_SIGN (type) == SIGNED
&& TREE_CODE (arg1) == INTEGER_CST
&& !TREE_OVERFLOW (arg1)
- && TREE_INT_CST_HIGH (arg1) < 0
+ && wi::neg_p (arg1)
&& !TYPE_OVERFLOW_TRAPS (type)
/* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
&& !sign_bit_p (arg1, arg1))
prec = element_precision (type);
/* Turn (a OP c1) OP c2 into a OP (c1+c2). */
- if (TREE_CODE (op0) == code && host_integerp (arg1, true)
- && TREE_INT_CST_LOW (arg1) < prec
- && host_integerp (TREE_OPERAND (arg0, 1), true)
- && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
+ if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
+ && tree_to_uhwi (arg1) < prec
+ && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
+ && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
{
- unsigned int low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
- + TREE_INT_CST_LOW (arg1));
+ unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
+ + tree_to_uhwi (arg1));
/* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
being well defined. */
if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
|| (TYPE_UNSIGNED (type)
&& code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
- && host_integerp (arg1, false)
- && TREE_INT_CST_LOW (arg1) < prec
- && host_integerp (TREE_OPERAND (arg0, 1), false)
- && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
+ && tree_fits_uhwi_p (arg1)
+ && tree_to_uhwi (arg1) < prec
+ && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
+ && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
{
- HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
- HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
+ HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
+ HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
tree lshift;
tree arg00;
fold_build2_loc (loc, code, type,
TREE_OPERAND (arg0, 1), arg1));
- /* Two consecutive rotates adding up to the precision of the
- type can be ignored. */
+ /* Two consecutive rotates adding up to some integer
+ multiple of the precision of the type can be ignored. */
if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
&& TREE_CODE (arg0) == RROTATE_EXPR
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
- && TREE_INT_CST_HIGH (arg1) == 0
- && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
- && ((TREE_INT_CST_LOW (arg1)
- + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
- == prec))
+ && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
+ prec) == 0)
return TREE_OPERAND (arg0, 0);
/* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
type);
}
- /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
- a MINUS_EXPR of a constant, we can convert it into a comparison with
- a revised constant as long as no overflow occurs. */
- if (TREE_CODE (arg1) == INTEGER_CST
- && (TREE_CODE (arg0) == PLUS_EXPR
- || TREE_CODE (arg0) == MINUS_EXPR)
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
- && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
- ? MINUS_EXPR : PLUS_EXPR,
- fold_convert_loc (loc, TREE_TYPE (arg0),
- arg1),
- TREE_OPERAND (arg0, 1)))
- && !TREE_OVERFLOW (tem))
- return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
-
/* Similarly for a NEGATE_EXPR. */
if (TREE_CODE (arg0) == NEGATE_EXPR
&& TREE_CODE (arg1) == INTEGER_CST
&& operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
1)),
arg1, 0)
- && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
+ && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
{
return omit_two_operands_loc (loc, type,
code == NE_EXPR
TREE_OPERAND (arg0, 1), arg1);
}
- /* If we have X - Y == 0, we can convert that to X == Y and similarly
- for !=. Don't do this for ordered comparisons due to overflow. */
- if (TREE_CODE (arg0) == MINUS_EXPR
- && integer_zerop (arg1))
- return fold_build2_loc (loc, code, type,
- TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
-
/* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
if (TREE_CODE (arg0) == ABS_EXPR
&& (integer_zerop (arg1) || real_zerop (arg1)))
prec = TYPE_PRECISION (itype);
/* Check for a valid shift count. */
- if (TREE_INT_CST_HIGH (arg001) == 0
- && TREE_INT_CST_LOW (arg001) < prec)
+ if (wi::ltu_p (arg001, prec))
{
tree arg01 = TREE_OPERAND (arg0, 1);
tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
tree arg00 = TREE_OPERAND (arg0, 0);
tree arg01 = TREE_OPERAND (arg0, 1);
tree itype = TREE_TYPE (arg00);
- if (TREE_INT_CST_HIGH (arg01) == 0
- && TREE_INT_CST_LOW (arg01)
- == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
+ if (wi::eq_p (arg01, element_precision (itype) - 1))
{
if (TYPE_UNSIGNED (itype))
{
the specified precision will have known values. */
{
tree arg1_type = TREE_TYPE (arg1);
- unsigned int width = TYPE_PRECISION (arg1_type);
+ unsigned int prec = TYPE_PRECISION (arg1_type);
if (TREE_CODE (arg1) == INTEGER_CST
- && width <= HOST_BITS_PER_DOUBLE_INT
&& (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
{
- HOST_WIDE_INT signed_max_hi;
- unsigned HOST_WIDE_INT signed_max_lo;
- unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
+ wide_int max = wi::max_value (arg1_type);
+ wide_int signed_max = wi::max_value (prec, SIGNED);
+ wide_int min = wi::min_value (arg1_type);
- if (width <= HOST_BITS_PER_WIDE_INT)
- {
- signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
- - 1;
- signed_max_hi = 0;
- max_hi = 0;
-
- if (TYPE_UNSIGNED (arg1_type))
- {
- max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
- min_lo = 0;
- min_hi = 0;
- }
- else
- {
- max_lo = signed_max_lo;
- min_lo = (HOST_WIDE_INT_M1U << (width - 1));
- min_hi = -1;
- }
- }
- else
- {
- width -= HOST_BITS_PER_WIDE_INT;
- signed_max_lo = -1;
- signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
- - 1;
- max_lo = -1;
- min_lo = 0;
-
- if (TYPE_UNSIGNED (arg1_type))
- {
- max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
- min_hi = 0;
- }
- else
- {
- max_hi = signed_max_hi;
- min_hi = (HOST_WIDE_INT_M1U << (width - 1));
- }
- }
-
- if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
- && TREE_INT_CST_LOW (arg1) == max_lo)
+ if (wi::eq_p (arg1, max))
switch (code)
{
case GT_EXPR:
default:
break;
}
- else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
- == max_hi
- && TREE_INT_CST_LOW (arg1) == max_lo - 1)
+ else if (wi::eq_p (arg1, max - 1))
switch (code)
{
case GT_EXPR:
default:
break;
}
- else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
- == min_hi
- && TREE_INT_CST_LOW (arg1) == min_lo)
+ else if (wi::eq_p (arg1, min))
switch (code)
{
case LT_EXPR:
default:
break;
}
- else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
- == min_hi
- && TREE_INT_CST_LOW (arg1) == min_lo + 1)
+ else if (wi::eq_p (arg1, min + 1))
switch (code)
{
case GE_EXPR:
- arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
+ arg1 = const_binop (MINUS_EXPR, arg1,
+ build_int_cst (TREE_TYPE (arg1), 1));
return fold_build2_loc (loc, NE_EXPR, type,
fold_convert_loc (loc,
TREE_TYPE (arg1), arg0),
arg1);
case LT_EXPR:
- arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
+ arg1 = const_binop (MINUS_EXPR, arg1,
+ build_int_cst (TREE_TYPE (arg1), 1));
return fold_build2_loc (loc, EQ_EXPR, type,
fold_convert_loc (loc, TREE_TYPE (arg1),
arg0),
break;
}
- else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
- && TREE_INT_CST_LOW (arg1) == signed_max_lo
+ else if (wi::eq_p (arg1, signed_max)
&& TYPE_UNSIGNED (arg1_type)
/* We will flip the signedness of the comparison operator
associated with the mode of arg1, so the sign bit is
specified by this mode. Check that arg1 is the signed
max associated with this sign bit. */
- && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
+ && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
/* signed_type does not work on pointer types. */
&& INTEGRAL_TYPE_P (arg1_type))
{
and X >= signed_max+1 because of previous transformations. */
if (code == LE_EXPR || code == GT_EXPR)
{
- tree st;
- st = signed_type_for (TREE_TYPE (arg1));
+ tree st = signed_type_for (arg1_type);
return fold_build2_loc (loc,
code == LE_EXPR ? GE_EXPR : LT_EXPR,
type, fold_convert_loc (loc, st, arg0),
|| (TREE_CODE (arg0) == INTEGER_CST
&& TREE_CODE (arg1) == INTEGER_CST))
return build_complex (type, arg0, arg1);
- if (TREE_CODE (arg0) == REALPART_EXPR
- && TREE_CODE (arg1) == IMAGPART_EXPR
- && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
- && operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), 0))
- return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0));
return NULL_TREE;
case ASSERT_EXPR:
gcc_assert (IS_EXPR_CODE_CLASS (kind)
&& TREE_CODE_LENGTH (code) == 3);
+ /* If this is a commutative operation, and OP0 is a constant, move it
+ to OP1 to reduce the number of tests below. */
+ if (commutative_ternary_tree_code (code)
+ && tree_swap_operands_p (op0, op1, true))
+ return fold_build3_loc (loc, code, type, op1, op0, op2);
+
+ tem = generic_simplify (loc, code, type, op0, op1, op2);
+ if (tem)
+ return tem;
+
/* Strip any conversions that don't change the mode. This is safe
for every expression, except for a comparison expression because
its signedness is derived from its operands. So, in the latter
}
else if (TREE_CODE (arg0) == VECTOR_CST)
{
- if (integer_all_onesp (arg0))
- return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
- if (integer_zerop (arg0))
- return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
-
if ((TREE_CODE (arg1) == VECTOR_CST
|| TREE_CODE (arg1) == CONSTRUCTOR)
&& (TREE_CODE (arg2) == VECTOR_CST
}
}
- if (operand_equal_p (arg1, op2, 0))
- return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
-
/* If we have A op B ? A : C, we may be able to convert this to a
simpler expression, depending on the operation and the values
of B and C. Signed zeros prevent all of these transformations,
&& TYPE_PRECISION (TREE_TYPE (tem))
< TYPE_PRECISION (type))
{
- unsigned HOST_WIDE_INT mask_lo;
- HOST_WIDE_INT mask_hi;
int inner_width, outer_width;
tree tem_type;
if (outer_width > TYPE_PRECISION (type))
outer_width = TYPE_PRECISION (type);
- if (outer_width > HOST_BITS_PER_WIDE_INT)
- {
- mask_hi = (HOST_WIDE_INT_M1U
- >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
- mask_lo = -1;
- }
- else
- {
- mask_hi = 0;
- mask_lo = (HOST_WIDE_INT_M1U
- >> (HOST_BITS_PER_WIDE_INT - outer_width));
- }
- if (inner_width > HOST_BITS_PER_WIDE_INT)
- {
- mask_hi &= ~(HOST_WIDE_INT_M1U
- >> (HOST_BITS_PER_WIDE_INT - inner_width));
- mask_lo = 0;
- }
- else
- mask_lo &= ~(HOST_WIDE_INT_M1U
- >> (HOST_BITS_PER_WIDE_INT - inner_width));
+ wide_int mask = wi::shifted_mask
+ (inner_width, outer_width - inner_width, false,
+ TYPE_PRECISION (TREE_TYPE (arg1)));
- if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
- && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
+ wide_int common = mask & arg1;
+ if (common == mask)
{
tem_type = signed_type_for (TREE_TYPE (tem));
tem = fold_convert_loc (loc, tem_type, tem);
}
- else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
- && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
+ else if (common == 0)
{
tem_type = unsigned_type_for (TREE_TYPE (tem));
tem = fold_convert_loc (loc, tem_type, tem);
tree tem = TREE_OPERAND (arg0, 0);
STRIP_NOPS (tem);
if (TREE_CODE (tem) == RSHIFT_EXPR
- && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
+ && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
&& (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
- TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
+ tree_to_uhwi (TREE_OPERAND (tem, 1)))
return fold_build2_loc (loc, BIT_AND_EXPR, type,
TREE_OPERAND (tem, 0), arg1);
}
&& TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
{
tree eltype = TREE_TYPE (TREE_TYPE (arg0));
- unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
- unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
- unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
+ unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
+ unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
+ unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
if (n != 0
&& (idx % width) == 0
/* A bit-field-ref that referenced the full argument can be stripped. */
if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
- && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
+ && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
&& integer_zerop (op2))
return fold_convert_loc (loc, type, arg0);
fold (nearly) all BIT_FIELD_REFs. */
if (CONSTANT_CLASS_P (arg0)
&& can_native_interpret_type_p (type)
- && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
+ && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
/* This limitation should not be necessary; we just need to
round this up to mode size. */
- && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
+ && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
/* Need bit-shifting of the buffer to relax the following. */
- && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
+ && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
{
- unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
- unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
+ unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
+ unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
unsigned HOST_WIDE_INT clen;
- clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
+ clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
/* ??? We cannot tell native_encode_expr to start at
some random byte only. So limit us to a reasonable amount
of work. */
case VEC_PERM_EXPR:
if (TREE_CODE (arg2) == VECTOR_CST)
{
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
- unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
- tree t;
+ unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
+ unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
+ unsigned char *sel2 = sel + nelts;
bool need_mask_canon = false;
+ bool need_mask_canon2 = false;
bool all_in_vec0 = true;
bool all_in_vec1 = true;
bool maybe_identity = true;
bool single_arg = (op0 == op1);
bool changed = false;
- mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
+ mask2 = 2 * nelts - 1;
+ mask = single_arg ? (nelts - 1) : mask2;
gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
for (i = 0; i < nelts; i++)
{
if (TREE_CODE (val) != INTEGER_CST)
return NULL_TREE;
- sel[i] = TREE_INT_CST_LOW (val) & mask;
- if (TREE_INT_CST_HIGH (val)
- || ((unsigned HOST_WIDE_INT)
- TREE_INT_CST_LOW (val) != sel[i]))
- need_mask_canon = true;
+ /* Make sure that the perm value is in an acceptable
+ range. */
+ wide_int t = val;
+ need_mask_canon |= wi::gtu_p (t, mask);
+ need_mask_canon2 |= wi::gtu_p (t, mask2);
+ sel[i] = t.to_uhwi () & mask;
+ sel2[i] = t.to_uhwi () & mask2;
if (sel[i] < nelts)
all_in_vec1 = false;
&& (TREE_CODE (op1) == VECTOR_CST
|| TREE_CODE (op1) == CONSTRUCTOR))
{
- t = fold_vec_perm (type, op0, op1, sel);
+ tree t = fold_vec_perm (type, op0, op1, sel);
if (t != NULL_TREE)
return t;
}
if (op0 == op1 && !single_arg)
changed = true;
+ /* Some targets are deficient and fail to expand a single
+ argument permutation while still allowing an equivalent
+ 2-argument version. */
+ if (need_mask_canon && arg2 == op2
+ && !can_vec_perm_p (TYPE_MODE (type), false, sel)
+ && can_vec_perm_p (TYPE_MODE (type), false, sel2))
+ {
+ need_mask_canon = need_mask_canon2;
+ sel = sel2;
+ }
+
if (need_mask_canon && arg2 == op2)
{
tree *tsel = XALLOCAVEC (tree, nelts);
#undef fold
static void fold_checksum_tree (const_tree, struct md5_ctx *,
- hash_table <pointer_hash <tree_node> >);
+ hash_table<pointer_hash<const tree_node> > *);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);
tree ret;
struct md5_ctx ctx;
unsigned char checksum_before[16], checksum_after[16];
- hash_table <pointer_hash <tree_node> > ht;
+ hash_table<pointer_hash<const tree_node> > ht (32);
- ht.create (32);
md5_init_ctx (&ctx);
- fold_checksum_tree (expr, &ctx, ht);
+ fold_checksum_tree (expr, &ctx, &ht);
md5_finish_ctx (&ctx, checksum_before);
ht.empty ();
ret = fold_1 (expr);
md5_init_ctx (&ctx);
- fold_checksum_tree (expr, &ctx, ht);
+ fold_checksum_tree (expr, &ctx, &ht);
md5_finish_ctx (&ctx, checksum_after);
- ht.dispose ();
if (memcmp (checksum_before, checksum_after, 16))
fold_check_failed (expr, ret);
{
struct md5_ctx ctx;
unsigned char checksum[16], cnt;
- hash_table <pointer_hash <tree_node> > ht;
+ hash_table<pointer_hash<const tree_node> > ht (32);
- ht.create (32);
md5_init_ctx (&ctx);
- fold_checksum_tree (expr, &ctx, ht);
+ fold_checksum_tree (expr, &ctx, &ht);
md5_finish_ctx (&ctx, checksum);
- ht.dispose ();
for (cnt = 0; cnt < 16; ++cnt)
fprintf (stderr, "%02x", checksum[cnt]);
putc ('\n', stderr);
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
- hash_table <pointer_hash <tree_node> > ht)
+ hash_table<pointer_hash <const tree_node> > *ht)
{
- tree_node **slot;
+ const tree_node **slot;
enum tree_code code;
union tree_node buf;
int i, len;
recursive_label:
if (expr == NULL)
return;
- slot = ht.find_slot (expr, INSERT);
+ slot = ht->find_slot (expr, INSERT);
if (*slot != NULL)
return;
- *slot = CONST_CAST_TREE (expr);
+ *slot = expr;
code = TREE_CODE (expr);
if (TREE_CODE_CLASS (code) == tcc_declaration
&& DECL_ASSEMBLER_NAME_SET_P (expr))
fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
}
- if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
- fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
{
- fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
+ if (TREE_CODE (expr) == FUNCTION_DECL)
+ {
+ fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
+ fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
+ }
fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
- fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
}
break;
case tcc_type:
int i;
unsigned char checksum[16];
struct md5_ctx ctx;
- hash_table <pointer_hash <tree_node> > ht;
- ht.create (32);
+ hash_table<pointer_hash<const tree_node> > ht (32);
md5_init_ctx (&ctx);
- fold_checksum_tree (t, &ctx, ht);
+ fold_checksum_tree (t, &ctx, &ht);
md5_finish_ctx (&ctx, checksum);
ht.empty ();
#ifdef ENABLE_FOLD_CHECKING
unsigned char checksum_before[16], checksum_after[16];
struct md5_ctx ctx;
- hash_table <pointer_hash <tree_node> > ht;
+ hash_table<pointer_hash<const tree_node> > ht (32);
- ht.create (32);
md5_init_ctx (&ctx);
- fold_checksum_tree (op0, &ctx, ht);
+ fold_checksum_tree (op0, &ctx, &ht);
md5_finish_ctx (&ctx, checksum_before);
ht.empty ();
#endif
#ifdef ENABLE_FOLD_CHECKING
md5_init_ctx (&ctx);
- fold_checksum_tree (op0, &ctx, ht);
+ fold_checksum_tree (op0, &ctx, &ht);
md5_finish_ctx (&ctx, checksum_after);
- ht.dispose ();
if (memcmp (checksum_before, checksum_after, 16))
fold_check_failed (op0, tem);
checksum_after_op0[16],
checksum_after_op1[16];
struct md5_ctx ctx;
- hash_table <pointer_hash <tree_node> > ht;
+ hash_table<pointer_hash<const tree_node> > ht (32);
- ht.create (32);
md5_init_ctx (&ctx);
- fold_checksum_tree (op0, &ctx, ht);
+ fold_checksum_tree (op0, &ctx, &ht);
md5_finish_ctx (&ctx, checksum_before_op0);
ht.empty ();
md5_init_ctx (&ctx);
- fold_checksum_tree (op1, &ctx, ht);
+ fold_checksum_tree (op1, &ctx, &ht);
md5_finish_ctx (&ctx, checksum_before_op1);
ht.empty ();
#endif
#ifdef ENABLE_FOLD_CHECKING
md5_init_ctx (&ctx);
- fold_checksum_tree (op0, &ctx, ht);
+ fold_checksum_tree (op0, &ctx, &ht);
md5_finish_ctx (&ctx, checksum_after_op0);
ht.empty ();
fold_check_failed (op0, tem);
md5_init_ctx (&ctx);
- fold_checksum_tree (op1, &ctx, ht);
+ fold_checksum_tree (op1, &ctx, &ht);
md5_finish_ctx (&ctx, checksum_after_op1);
- ht.dispose ();
if (memcmp (checksum_before_op1, checksum_after_op1, 16))
fold_check_failed (op1, tem);
checksum_after_op1[16],
checksum_after_op2[16];
struct md5_ctx ctx;
- hash_table <pointer_hash <tree_node> > ht;
+ hash_table<pointer_hash<const tree_node> > ht (32);
- ht.create (32);
md5_init_ctx (&ctx);
- fold_checksum_tree (op0, &ctx, ht);
+ fold_checksum_tree (op0, &ctx, &ht);
md5_finish_ctx (&ctx, checksum_before_op0);
ht.empty ();
md5_init_ctx (&ctx);
- fold_checksum_tree (op1, &ctx, ht);
+ fold_checksum_tree (op1, &ctx, &ht);
md5_finish_ctx (&ctx, checksum_before_op1);
ht.empty ();
md5_init_ctx (&ctx);
- fold_checksum_tree (op2, &ctx, ht);
+ fold_checksum_tree (op2, &ctx, &ht);
md5_finish_ctx (&ctx, checksum_before_op2);
ht.empty ();
#endif
#ifdef ENABLE_FOLD_CHECKING
md5_init_ctx (&ctx);
- fold_checksum_tree (op0, &ctx, ht);
+ fold_checksum_tree (op0, &ctx, &ht);
md5_finish_ctx (&ctx, checksum_after_op0);
ht.empty ();
fold_check_failed (op0, tem);
md5_init_ctx (&ctx);
- fold_checksum_tree (op1, &ctx, ht);
+ fold_checksum_tree (op1, &ctx, &ht);
md5_finish_ctx (&ctx, checksum_after_op1);
ht.empty ();
fold_check_failed (op1, tem);
md5_init_ctx (&ctx);
- fold_checksum_tree (op2, &ctx, ht);
+ fold_checksum_tree (op2, &ctx, &ht);
md5_finish_ctx (&ctx, checksum_after_op2);
- ht.dispose ();
if (memcmp (checksum_before_op2, checksum_after_op2, 16))
fold_check_failed (op2, tem);
checksum_after_fn[16],
checksum_after_arglist[16];
struct md5_ctx ctx;
- hash_table <pointer_hash <tree_node> > ht;
+ hash_table<pointer_hash<const tree_node> > ht (32);
int i;
- ht.create (32);
md5_init_ctx (&ctx);
- fold_checksum_tree (fn, &ctx, ht);
+ fold_checksum_tree (fn, &ctx, &ht);
md5_finish_ctx (&ctx, checksum_before_fn);
ht.empty ();
md5_init_ctx (&ctx);
for (i = 0; i < nargs; i++)
- fold_checksum_tree (argarray[i], &ctx, ht);
+ fold_checksum_tree (argarray[i], &ctx, &ht);
md5_finish_ctx (&ctx, checksum_before_arglist);
ht.empty ();
#endif
#ifdef ENABLE_FOLD_CHECKING
md5_init_ctx (&ctx);
- fold_checksum_tree (fn, &ctx, ht);
+ fold_checksum_tree (fn, &ctx, &ht);
md5_finish_ctx (&ctx, checksum_after_fn);
ht.empty ();
md5_init_ctx (&ctx);
for (i = 0; i < nargs; i++)
- fold_checksum_tree (argarray[i], &ctx, ht);
+ fold_checksum_tree (argarray[i], &ctx, &ht);
md5_finish_ctx (&ctx, checksum_after_arglist);
- ht.dispose ();
if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
fold_check_failed (NULL_TREE, tem);
return result;
}
-tree
-fold_build3_initializer_loc (location_t loc, enum tree_code code,
- tree type, tree op0, tree op1, tree op2)
-{
- tree result;
- START_FOLD_INIT;
-
- result = fold_build3_loc (loc, code, type, op0, op1, op2);
-
- END_FOLD_INIT;
- return result;
-}
-
tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
int nargs, tree *argarray)
op1 = TREE_OPERAND (top, 1);
/* const_binop may not detect overflow correctly,
so check for it explicitly here. */
- if (TYPE_PRECISION (TREE_TYPE (size_one_node))
- > TREE_INT_CST_LOW (op1)
- && TREE_INT_CST_HIGH (op1) == 0
+ if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
&& 0 != (t1 = fold_convert (type,
const_binop (LSHIFT_EXPR,
size_one_node,
&& (tree_int_cst_sgn (top) < 0
|| tree_int_cst_sgn (bottom) < 0)))
return 0;
- return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
- top, bottom));
+ return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
+ SIGNED);
default:
return 0;
&& TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
{
unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
- ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
+ ? tree_int_cst_min_precision (op0, UNSIGNED)
: TYPE_PRECISION (inner0);
unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
- ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
+ ? tree_int_cst_min_precision (op1, UNSIGNED)
: TYPE_PRECISION (inner1);
return precision0 + precision1 < TYPE_PRECISION (type);
if ((n & 1) == 0)
{
REAL_VALUE_TYPE cint;
- real_from_integer (&cint, VOIDmode, n,
- n < 0 ? -1 : 0, 0);
+ real_from_integer (&cint, VOIDmode, n, SIGNED);
if (real_identical (&c, &cint))
return true;
}
set *STRICT_OVERFLOW_P to true; otherwise, don't change
*STRICT_OVERFLOW_P. */
-bool
+static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
enum tree_code code = TREE_CODE (t);
case ADDR_EXPR:
{
tree base = TREE_OPERAND (t, 0);
+
if (!DECL_P (base))
base = get_base_address (base);
if (!base)
return false;
- /* Weak declarations may link to NULL. Other things may also be NULL
- so protect with -fdelete-null-pointer-checks; but not variables
- allocated on the stack. */
+ /* For objects in symbol table check if we know they are non-zero.
+ Don't do anything for variables and functions before symtab is built;
+ it is quite possible that they will be declared weak later. */
+ if (DECL_P (base) && decl_in_symtab_p (base))
+ {
+ struct symtab_node *symbol;
+
+ symbol = symtab_node::get_create (base);
+ if (symbol)
+ return symbol->nonzero_address ();
+ else
+ return false;
+ }
+
+ /* Function local objects are never NULL. */
if (DECL_P (base)
- && (flag_delete_null_pointer_checks
- || (DECL_CONTEXT (base)
- && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
- && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
- return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
+ && (DECL_CONTEXT (base)
+ && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
+ && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
+ return true;
/* Constants are never weak. */
if (CONSTANT_CLASS_P (base))
return false;
}
-/* Return true when T is an address and is known to be nonzero.
- For floating point we further ensure that T is not denormal.
- Similar logic is present in nonzero_address in rtlanal.h.
-
- If the return value is based on the assumption that signed overflow
- is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
- change *STRICT_OVERFLOW_P. */
-
-bool
-tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
-{
- tree type = TREE_TYPE (t);
- enum tree_code code;
-
- /* Doing something useful for floating point would need more work. */
- if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
- return false;
-
- code = TREE_CODE (t);
- switch (TREE_CODE_CLASS (code))
- {
- case tcc_unary:
- return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
- strict_overflow_p);
- case tcc_binary:
- case tcc_comparison:
- return tree_binary_nonzero_warnv_p (code, type,
- TREE_OPERAND (t, 0),
- TREE_OPERAND (t, 1),
- strict_overflow_p);
- case tcc_constant:
- case tcc_declaration:
- case tcc_reference:
- return tree_single_nonzero_warnv_p (t, strict_overflow_p);
-
- default:
- break;
- }
-
- switch (code)
- {
- case TRUTH_NOT_EXPR:
- return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
- strict_overflow_p);
-
- case TRUTH_AND_EXPR:
- case TRUTH_OR_EXPR:
- case TRUTH_XOR_EXPR:
- return tree_binary_nonzero_warnv_p (code, type,
- TREE_OPERAND (t, 0),
- TREE_OPERAND (t, 1),
- strict_overflow_p);
-
- case COND_EXPR:
- case CONSTRUCTOR:
- case OBJ_TYPE_REF:
- case ASSERT_EXPR:
- case ADDR_EXPR:
- case WITH_SIZE_EXPR:
- case SSA_NAME:
- return tree_single_nonzero_warnv_p (t, strict_overflow_p);
-
- case COMPOUND_EXPR:
- case MODIFY_EXPR:
- case BIND_EXPR:
- return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
- strict_overflow_p);
-
- case SAVE_EXPR:
- return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
- strict_overflow_p);
-
- case CALL_EXPR:
- {
- tree fndecl = get_callee_fndecl (t);
- if (!fndecl) return false;
- if (flag_delete_null_pointer_checks && !flag_check_new
- && DECL_IS_OPERATOR_NEW (fndecl)
- && !TREE_NOTHROW (fndecl))
- return true;
- if (flag_delete_null_pointer_checks
- && lookup_attribute ("returns_nonnull",
- TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
- return true;
- return alloca_call_p (t);
- }
-
- default:
- break;
- }
- return false;
-}
-
-/* Return true when T is an address and is known to be nonzero.
- Handle warnings about undefined signed overflow. */
-
-bool
-tree_expr_nonzero_p (tree t)
-{
- bool ret, strict_overflow_p;
-
- strict_overflow_p = false;
- ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
- if (strict_overflow_p)
- fold_overflow_warning (("assuming signed overflow does not occur when "
- "determining that expression is always "
- "non-zero"),
- WARN_STRICT_OVERFLOW_MISC);
- return ret;
-}
-
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
attempt to fold the expression to a constant without modifying TYPE,
OP0 or OP1.
{
case INTEGER_CST:
{
- double_int val = tree_to_double_int (arg0);
bool overflow;
- val = val.neg_with_overflow (&overflow);
- t = force_fit_type_double (type, val, 1,
- (overflow | TREE_OVERFLOW (arg0))
- && !TYPE_UNSIGNED (type));
+ wide_int val = wi::neg (arg0, &overflow);
+ t = force_fit_type (type, val, 1,
+ (overflow | TREE_OVERFLOW (arg0))
+ && !TYPE_UNSIGNED (type));
break;
}
{
case INTEGER_CST:
{
- double_int val = tree_to_double_int (arg0);
-
/* If the value is unsigned or non-negative, then the absolute value
is the same as the ordinary value. */
- if (TYPE_UNSIGNED (type)
- || !val.is_negative ())
+ if (!wi::neg_p (arg0, TYPE_SIGN (type)))
t = arg0;
/* If the value is negative, then the absolute value is
else
{
bool overflow;
- val = val.neg_with_overflow (&overflow);
- t = force_fit_type_double (type, val, -1,
- overflow | TREE_OVERFLOW (arg0));
+ wide_int val = wi::neg (arg0, &overflow);
+ t = force_fit_type (type, val, -1,
+ overflow | TREE_OVERFLOW (arg0));
}
}
break;
static tree
fold_not_const (const_tree arg0, tree type)
{
- double_int val;
-
gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
- val = ~tree_to_double_int (arg0);
- return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
+ return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
}
/* Given CODE, a relational operator, the target type, TYPE and two
{
if (code == EQ_EXPR)
result = tree_int_cst_equal (op0, op1);
- else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
- result = INT_CST_LT_UNSIGNED (op0, op1);
else
- result = INT_CST_LT (op0, op1);
+ result = tree_int_cst_lt (op0, op1);
}
else
return NULL_TREE;
if (TREE_CODE (op00type) == VECTOR_TYPE
&& type == TREE_TYPE (op00type))
{
- HOST_WIDE_INT offset = tree_low_cst (op01, 0);
+ HOST_WIDE_INT offset = tree_to_shwi (op01);
tree part_width = TYPE_SIZE (type);
- unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
+ unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
tree index = bitsize_int (indexi);
- if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
+ if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
return fold_build3_loc (loc,
BIT_FIELD_REF, type, op00,
part_width, index);
}
}
-/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
- This can only be applied to objects of a sizetype. */
+/* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
tree
-round_up_loc (location_t loc, tree value, int divisor)
+round_up_loc (location_t loc, tree value, unsigned int divisor)
{
tree div = NULL_TREE;
- gcc_assert (divisor > 0);
if (divisor == 1)
return value;
{
if (TREE_CODE (value) == INTEGER_CST)
{
- double_int val = tree_to_double_int (value);
+ wide_int val = value;
bool overflow_p;
- if ((val.low & (divisor - 1)) == 0)
+ if ((val & (divisor - 1)) == 0)
return value;
overflow_p = TREE_OVERFLOW (value);
- val.low &= ~(divisor - 1);
- val.low += divisor;
- if (val.low == 0)
- {
- val.high++;
- if (val.high == 0)
- overflow_p = true;
- }
+ val &= ~(divisor - 1);
+ val += divisor;
+ if (val == 0)
+ overflow_p = true;
- return force_fit_type_double (TREE_TYPE (value), val,
- -1, overflow_p);
+ return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
}
else
{
HOST_WIDE_INT *pbitpos, tree *poffset)
{
tree core;
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp, volatilep;
HOST_WIDE_INT bitsize;
location_t loc = EXPR_LOCATION (exp);