#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
+#include "stringpool.h"
+#include "attribs.h"
/* Nonzero if we are folding constants inside an initializer; zero
otherwise. */
static bool negate_expr_p (tree);
static tree negate_expr (tree);
-static tree split_tree (location_t, tree, tree, enum tree_code,
- tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
-static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
-static bool vec_cst_ctor_to_array (tree, tree *);
static tree fold_negate_expr (location_t, tree);
if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
return true;
- int count = TYPE_VECTOR_SUBPARTS (type), i;
+ int count = VECTOR_CST_NELTS (t), i;
for (i = 0; i < count; i++)
if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
case VECTOR_CST:
{
- int count = TYPE_VECTOR_SUBPARTS (type), i;
- tree *elts = XALLOCAVEC (tree, count);
+ int count = VECTOR_CST_NELTS (t), i;
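+ /* Collect the negated elements; the inline capacity of 32
+ keeps the common small vectors off the heap. */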
+ auto_vec<tree, 32> elts (count);
for (i = 0; i < count; i++)
{
- elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
- if (elts[i] == NULL_TREE)
+ tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
+ if (elt == NULL_TREE)
return NULL_TREE;
+ elts.quick_push (elt);
}
return build_vector (type, elts);
same type as IN, but they will have the same signedness and mode. */
static tree
-split_tree (location_t loc, tree in, tree type, enum tree_code code,
- tree *conp, tree *litp, tree *minus_litp, int negate_p)
+split_tree (tree in, tree type, enum tree_code code,
+ tree *minus_varp, tree *conp, tree *minus_conp,
+ tree *litp, tree *minus_litp, int negate_p)
{
tree var = 0;
-
+ *minus_varp = 0;
*conp = 0;
+ *minus_conp = 0;
*litp = 0;
*minus_litp = 0;
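+ /* Conceptually, IN reassociates to VAR + *CONP + *LITP
+ - *MINUS_VARP - *MINUS_CONP - *MINUS_LITP, negated as a
+ whole when NEGATE_P is set. */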
if (neg_litp_p)
*minus_litp = *litp, *litp = 0;
if (neg_conp_p && *conp)
- {
- /* Convert to TYPE before negating. */
- *conp = fold_convert_loc (loc, type, *conp);
- *conp = negate_expr (*conp);
- }
+ *minus_conp = *conp, *conp = 0;
if (neg_var_p && var)
- {
- /* Convert to TYPE before negating. */
- var = fold_convert_loc (loc, type, var);
- var = negate_expr (var);
- }
+ *minus_varp = var, var = 0;
}
else if (TREE_CONSTANT (in))
*conp = in;
else if (TREE_CODE (in) == BIT_NOT_EXPR
&& code == PLUS_EXPR)
{
- /* -X - 1 is folded to ~X, undo that here. Do _not_ do this
- when IN is constant. Convert to TYPE before negating. */
- *minus_litp = build_one_cst (type);
- var = negate_expr (fold_convert_loc (loc, type, TREE_OPERAND (in, 0)));
+ /* -1 - X is folded to ~X, undo that here. Do _not_ do this
+ when IN is constant. */
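+ /* E.g. ~X splits into the literal -1 minus the variable X. */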
+ *litp = build_minus_one_cst (type);
+ *minus_varp = TREE_OPERAND (in, 0);
}
else
var = in;
else if (*minus_litp)
*litp = *minus_litp, *minus_litp = 0;
if (*conp)
- {
- /* Convert to TYPE before negating. */
- *conp = fold_convert_loc (loc, type, *conp);
- *conp = negate_expr (*conp);
- }
+ *minus_conp = *conp, *conp = 0;
+ else if (*minus_conp)
+ *conp = *minus_conp, *minus_conp = 0;
if (var)
- {
- /* Convert to TYPE before negating. */
- var = fold_convert_loc (loc, type, var);
- var = negate_expr (var);
- }
+ *minus_varp = var, var = 0;
+ else if (*minus_varp)
+ var = *minus_varp, *minus_varp = 0;
}
if (*litp
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
if (t1 == 0)
- return t2;
+ {
+ gcc_assert (t2 == 0 || code != MINUS_EXPR);
+ return t2;
+ }
else if (t2 == 0)
return t1;
try to fold this since we will have infinite recursion. But do
deal with any NEGATE_EXPRs. */
if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
+ || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
|| TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
{
if (code == PLUS_EXPR)
&& TREE_CODE (arg2) == VECTOR_CST)
{
tree type = TREE_TYPE (arg1);
- int count = TYPE_VECTOR_SUBPARTS (type), i;
- tree *elts = XALLOCAVEC (tree, count);
+ int count = VECTOR_CST_NELTS (arg1), i;
+ auto_vec<tree, 32> elts (count);
for (i = 0; i < count; i++)
{
tree elem1 = VECTOR_CST_ELT (arg1, i);
tree elem2 = VECTOR_CST_ELT (arg2, i);
- elts[i] = const_binop (code, elem1, elem2);
+ tree elt = const_binop (code, elem1, elem2);
/* It is possible that const_binop cannot handle the given
code and returns NULL_TREE. */
- if (elts[i] == NULL_TREE)
+ if (elt == NULL_TREE)
return NULL_TREE;
+ elts.quick_push (elt);
}
return build_vector (type, elts);
&& TREE_CODE (arg2) == INTEGER_CST)
{
tree type = TREE_TYPE (arg1);
- int count = TYPE_VECTOR_SUBPARTS (type), i;
- tree *elts = XALLOCAVEC (tree, count);
+ int count = VECTOR_CST_NELTS (arg1), i;
+ auto_vec<tree, 32> elts (count);
for (i = 0; i < count; i++)
{
tree elem1 = VECTOR_CST_ELT (arg1, i);
- elts[i] = const_binop (code, elem1, arg2);
+ tree elt = const_binop (code, elem1, arg2);
/* It is possible that const_binop cannot handle the given
code and returns NULL_TREE. */
- if (elts[i] == NULL_TREE)
+ if (elt == NULL_TREE)
return NULL_TREE;
+ elts.quick_push (elt);
}
return build_vector (type, elts);
case VEC_PACK_TRUNC_EXPR:
case VEC_PACK_FIX_TRUNC_EXPR:
{
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
- tree *elts;
+ unsigned int out_nelts, in_nelts, i;

- gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
- && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
if (TREE_CODE (arg1) != VECTOR_CST
|| TREE_CODE (arg2) != VECTOR_CST)
return NULL_TREE;
- elts = XALLOCAVEC (tree, nelts);
- if (!vec_cst_ctor_to_array (arg1, elts)
- || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
- return NULL_TREE;
+ in_nelts = VECTOR_CST_NELTS (arg1);
+ out_nelts = in_nelts * 2;
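+ /* Packing narrows the elements, so the two input vectors
+ together supply one output vector of twice as many elements. */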
+ gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
+ && out_nelts == TYPE_VECTOR_SUBPARTS (type));

- for (i = 0; i < nelts; i++)
+ auto_vec<tree, 32> elts (out_nelts);
+ for (i = 0; i < out_nelts; i++)
{
- elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
- ? NOP_EXPR : FIX_TRUNC_EXPR,
- TREE_TYPE (type), elts[i]);
- if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
+ tree elt = (i < in_nelts
+ ? VECTOR_CST_ELT (arg1, i)
+ : VECTOR_CST_ELT (arg2, i - in_nelts));
+ elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
+ ? NOP_EXPR : FIX_TRUNC_EXPR,
+ TREE_TYPE (type), elt);
+ if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
return NULL_TREE;
+ elts.quick_push (elt);
}
return build_vector (type, elts);
case VEC_WIDEN_MULT_EVEN_EXPR:
case VEC_WIDEN_MULT_ODD_EXPR:
{
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
- unsigned int out, ofs, scale;
- tree *elts;
+ unsigned int out_nelts, in_nelts, out, ofs, scale;

- gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
- && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
return NULL_TREE;
- elts = XALLOCAVEC (tree, nelts * 4);
- if (!vec_cst_ctor_to_array (arg1, elts)
- || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
- return NULL_TREE;
+ in_nelts = VECTOR_CST_NELTS (arg1);
+ out_nelts = in_nelts / 2;
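+ /* Widening doubles the element width, so only half of the
+ input elements fit in the output vector. */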
+ gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
+ && out_nelts == TYPE_VECTOR_SUBPARTS (type));
if (code == VEC_WIDEN_MULT_LO_EXPR)
- scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
+ scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
else if (code == VEC_WIDEN_MULT_HI_EXPR)
- scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
+ scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
scale = 1, ofs = 0;
else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
scale = 1, ofs = 1;
- for (out = 0; out < nelts; out++)
+ auto_vec<tree, 32> elts (out_nelts);
+ for (out = 0; out < out_nelts; out++)
{
- unsigned int in1 = (out << scale) + ofs;
- unsigned int in2 = in1 + nelts * 2;
- tree t1, t2;
-
- t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
- t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
+ unsigned int in = (out << scale) + ofs;
+ tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
+ VECTOR_CST_ELT (arg1, in));
+ tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
+ VECTOR_CST_ELT (arg2, in));
if (t1 == NULL_TREE || t2 == NULL_TREE)
return NULL_TREE;
- elts[out] = const_binop (MULT_EXPR, t1, t2);
- if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
+ tree elt = const_binop (MULT_EXPR, t1, t2);
+ if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
return NULL_TREE;
+ elts.quick_push (elt);
}
return build_vector (type, elts);
/* Perform BIT_NOT_EXPR on each element individually. */
else if (TREE_CODE (arg0) == VECTOR_CST)
{
- tree *elements;
tree elem;
unsigned count = VECTOR_CST_NELTS (arg0), i;
- elements = XALLOCAVEC (tree, count);
+ auto_vec<tree, 32> elements (count);
for (i = 0; i < count; i++)
{
elem = VECTOR_CST_ELT (arg0, i);
elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
if (elem == NULL_TREE)
break;
- elements[i] = elem;
+ elements.quick_push (elem);
}
if (i == count)
return build_vector (type, elements);
case VEC_UNPACK_FLOAT_LO_EXPR:
case VEC_UNPACK_FLOAT_HI_EXPR:
{
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
- tree *elts;
+ unsigned int out_nelts, in_nelts, i;
enum tree_code subcode;
- gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
if (TREE_CODE (arg0) != VECTOR_CST)
return NULL_TREE;
- elts = XALLOCAVEC (tree, nelts * 2);
- if (!vec_cst_ctor_to_array (arg0, elts))
- return NULL_TREE;
+ in_nelts = VECTOR_CST_NELTS (arg0);
+ out_nelts = in_nelts / 2;
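+ /* Unpacking widens the elements, so one output vector covers
+ only half of ARG0's elements. */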
+ gcc_assert (out_nelts == TYPE_VECTOR_SUBPARTS (type));
+ unsigned int offset = 0;
if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
|| code == VEC_UNPACK_FLOAT_LO_EXPR))
- elts += nelts;
+ offset = out_nelts;
if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
subcode = NOP_EXPR;
else
subcode = FLOAT_EXPR;
- for (i = 0; i < nelts; i++)
+ auto_vec<tree, 32> elts (out_nelts);
+ for (i = 0; i < out_nelts; i++)
{
- elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
- if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
+ tree elt = fold_convert_const (subcode, TREE_TYPE (type),
+ VECTOR_CST_ELT (arg0, i + offset));
+ if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
return NULL_TREE;
+ elts.quick_push (elt);
}
return build_vector (type, elts);
case REDUC_PLUS_EXPR:
{
unsigned int nelts, i;
- tree *elts;
enum tree_code subcode;
if (TREE_CODE (arg0) != VECTOR_CST)
return NULL_TREE;
- nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
-
- elts = XALLOCAVEC (tree, nelts);
- if (!vec_cst_ctor_to_array (arg0, elts))
- return NULL_TREE;
+ nelts = VECTOR_CST_NELTS (arg0);
switch (code)
{
default: gcc_unreachable ();
}
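+ /* Accumulate the remaining elements into the first one
+ using SUBCODE. */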
+ tree res = VECTOR_CST_ELT (arg0, 0);
for (i = 1; i < nelts; i++)
{
- elts[0] = const_binop (subcode, elts[0], elts[i]);
- if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
+ res = const_binop (subcode, res, VECTOR_CST_ELT (arg0, i));
+ if (res == NULL_TREE || !CONSTANT_CLASS_P (res))
return NULL_TREE;
}
- return elts[0];
+ return res;
}
default:
{
tree t;
double_int temp, temp_trunc;
- unsigned int mode;
+ scalar_mode mode;
/* Right shift FIXED_CST to temp by fbit. */
temp = TREE_FIXED_CST (arg1).data;
REAL_VALUE_TYPE value;
tree t;
- real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
+ real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
+ &TREE_FIXED_CST (arg1));
t = build_real (type, value);
TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
tree t;
bool overflow_p;
- overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
- TYPE_SATURATING (type));
+ overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
+ &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
t = build_fixed (type, value);
/* Propagate overflow flags. */
else
di.high = TREE_INT_CST_ELT (arg1, 1);
- overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
+ overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
TYPE_UNSIGNED (TREE_TYPE (arg1)),
TYPE_SATURATING (type));
t = build_fixed (type, value);
tree t;
bool overflow_p;
- overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
+ overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
&TREE_REAL_CST (arg1),
TYPE_SATURATING (type));
t = build_fixed (type, value);
if (TREE_CODE (arg1) == VECTOR_CST
&& TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
{
- int len = TYPE_VECTOR_SUBPARTS (type);
+ int len = VECTOR_CST_NELTS (arg1);
tree elttype = TREE_TYPE (type);
- tree *v = XALLOCAVEC (tree, len);
+ auto_vec<tree, 32> v (len);
for (int i = 0; i < len; ++i)
{
tree elt = VECTOR_CST_ELT (arg1, i);
tree cvt = fold_convert_const (code, elttype, elt);
if (cvt == NULL_TREE)
return NULL_TREE;
- v[i] = cvt;
+ v.quick_push (cvt);
}
return build_vector (type, v);
}
#undef OP_SAME_WITH_NULL
}
\f
-/* Similar to operand_equal_p, but see if ARG0 might have been made by
- shorten_compare from ARG1 when ARG1 was being compared with OTHER.
- When in doubt, return 0. */
+/* Similar to operand_equal_p, but strip nops first. */
-
-static int
-operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
+static bool
+operand_equal_for_comparison_p (tree arg0, tree arg1)
{
- int unsignedp1, unsignedpo;
- tree primarg0, primarg1, primother;
- unsigned int correct_width;
-
if (operand_equal_p (arg0, arg1, 0))
- return 1;
+ return true;
if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
|| ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
- return 0;
+ return false;
/* Discard any conversions that don't change the modes of ARG0 and ARG1
and see if the inner values are the same. This removes any
signedness comparison, which doesn't matter here. */
- primarg0 = arg0, primarg1 = arg1;
- STRIP_NOPS (primarg0);
- STRIP_NOPS (primarg1);
- if (operand_equal_p (primarg0, primarg1, 0))
- return 1;
-
- /* Duplicate what shorten_compare does to ARG1 and see if that gives the
- actual comparison operand, ARG0.
-
- First throw away any conversions to wider types
- already present in the operands. */
-
- primarg1 = get_narrower (arg1, &unsignedp1);
- primother = get_narrower (other, &unsignedpo);
-
- correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
- if (unsignedp1 == unsignedpo
- && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
- && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
- {
- tree type = TREE_TYPE (arg0);
-
- /* Make sure shorter operand is extended the right way
- to match the longer operand. */
- primarg1 = fold_convert (signed_or_unsigned_type_for
- (unsignedp1, TREE_TYPE (primarg1)), primarg1);
-
- if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
- return 1;
- }
+ STRIP_NOPS (arg0);
+ STRIP_NOPS (arg1);
+ if (operand_equal_p (arg0, arg1, 0))
+ return true;

- return 0;
+ return false;
}
\f
/* See if ARG is an expression that is either a comparison or is performing
tree type = TREE_TYPE (lhs);
tree unsigned_type;
int const_p = TREE_CODE (rhs) == INTEGER_CST;
- machine_mode lmode, rmode, nmode;
+ machine_mode lmode, rmode;
+ scalar_int_mode nmode;
int lunsignedp, runsignedp;
int lreversep, rreversep;
int lvolatilep = 0, rvolatilep = 0;
/* See if we can find a mode to refer to this field. We should be able to,
but fail if we can't. */
- nmode = get_best_mode (lbitsize, lbitpos, bitstart, bitend,
- const_p ? TYPE_ALIGN (TREE_TYPE (linner))
- : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
- TYPE_ALIGN (TREE_TYPE (rinner))),
- word_mode, false);
- if (nmode == VOIDmode)
+ if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
+ const_p ? TYPE_ALIGN (TREE_TYPE (linner))
+ : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
+ TYPE_ALIGN (TREE_TYPE (rinner))),
+ BITS_PER_WORD, false, &nmode))
return 0;
/* Set signed and unsigned types of the precision of this mode for the
expressions will be false, so all four give B. The min()
and max() versions would give a NaN instead. */
if (!HONOR_SIGNED_ZEROS (element_mode (type))
- && operand_equal_for_comparison_p (arg01, arg2, arg00)
+ && operand_equal_for_comparison_p (arg01, arg2)
/* Avoid these transformations if the COND_EXPR may be used
as an lvalue in the C++ front-end. PR c++/19199. */
&& (in_gimple_form
short-circuited branch and the underlying object on both sides
is the same, make a non-short-circuit operation. */
else if (LOGICAL_OP_NON_SHORT_CIRCUIT
+ && !flag_sanitize_coverage
&& lhs != 0 && rhs != 0
&& (code == TRUTH_ANDIF_EXPR
|| code == TRUTH_ORIF_EXPR)
unextend (tree c, int p, int unsignedp, tree mask)
{
tree type = TREE_TYPE (c);
- int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
+ int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
tree temp;
if (p == modesize || unsignedp)
int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
- machine_mode lnmode, rnmode;
+ scalar_int_mode lnmode, rnmode;
tree ll_mask, lr_mask, rl_mask, rr_mask;
tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
tree l_const, r_const;
to be relative to a field of that size. */
first_bit = MIN (ll_bitpos, rl_bitpos);
end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
- lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
- TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
- volatilep);
- if (lnmode == VOIDmode)
+ if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
+ TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
+ volatilep, &lnmode))
return 0;
lnbitsize = GET_MODE_BITSIZE (lnmode);
first_bit = MIN (lr_bitpos, rr_bitpos);
end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
- rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
- TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
- volatilep);
- if (rnmode == VOIDmode)
+ if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
+ TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
+ volatilep, &rnmode))
return 0;
rnbitsize = GET_MODE_BITSIZE (rnmode);
{
tree type = TREE_TYPE (t);
enum tree_code tcode = TREE_CODE (t);
- tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
- > GET_MODE_SIZE (TYPE_MODE (type)))
+ tree ctype = (wide_type != 0
+ && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
+ > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
? wide_type : type);
tree t1, t2;
int same_p = tcode == code;
if (arg00 != NULL_TREE
/* This is only a win if casting to a signed type is cheap,
i.e. when arg00's type is not a partial mode. */
- && TYPE_PRECISION (TREE_TYPE (arg00))
- == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
+ && type_has_mode_precision_p (TREE_TYPE (arg00)))
{
tree stype = signed_type_for (TREE_TYPE (arg00));
return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
tree inner = TREE_OPERAND (arg0, 0);
tree type = TREE_TYPE (arg0);
int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
- machine_mode operand_mode = TYPE_MODE (type);
+ scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
int ops_unsigned;
tree signed_type, unsigned_type, intermediate_type;
tree tem, one;
native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
{
tree type = TREE_TYPE (expr);
- int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
+ int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
int byte, offset, word, words;
unsigned char value;
native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
{
tree type = TREE_TYPE (expr);
- machine_mode mode = TYPE_MODE (type);
+ scalar_mode mode = SCALAR_TYPE_MODE (type);
int total_bytes = GET_MODE_SIZE (mode);
FIXED_VALUE_TYPE value;
tree i_value, i_type;
native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
{
tree type = TREE_TYPE (expr);
- int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
+ int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
int byte, offset, word, words, bitpos;
unsigned char value;
return 0;
part = TREE_IMAGPART (expr);
if (off != -1)
- off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
+ off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
if (off == -1
&& isize != rsize)
offset = 0;
count = VECTOR_CST_NELTS (expr);
itype = TREE_TYPE (TREE_TYPE (expr));
- size = GET_MODE_SIZE (TYPE_MODE (itype));
+ size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
for (i = 0; i < count; i++)
{
if (off >= size)
static int
native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
{
- tree type = TREE_TYPE (expr);
- HOST_WIDE_INT total_bytes;
-
- if (TREE_CODE (type) != ARRAY_TYPE
- || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
- || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
- || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
+ if (! can_native_encode_string_p (expr))
return 0;
- total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
+
+ HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
if ((off == -1 && total_bytes > len)
|| off >= total_bytes)
return 0;
static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
- int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
+ int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
if (total_bytes > len
|| total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
- int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
+ scalar_mode mode = SCALAR_TYPE_MODE (type);
+ int total_bytes = GET_MODE_SIZE (mode);
double_int result;
FIXED_VALUE_TYPE fixed_value;
return NULL_TREE;
result = double_int::from_buffer (ptr, total_bytes);
- fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
+ fixed_value = fixed_from_double_int (result, mode);
return build_fixed (type, fixed_value);
}
static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
- machine_mode mode = TYPE_MODE (type);
+ scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
int total_bytes = GET_MODE_SIZE (mode);
unsigned char value;
/* There are always 32 bits in each long, no matter the size of
REAL_VALUE_TYPE r;
long tmp[6];
- total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
if (total_bytes > len || total_bytes > 24)
return NULL_TREE;
int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
int size;
etype = TREE_TYPE (type);
- size = GET_MODE_SIZE (TYPE_MODE (etype));
+ size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
if (size * 2 > len)
return NULL_TREE;
rpart = native_interpret_expr (etype, ptr, size);
{
tree etype, elem;
int i, size, count;
- tree *elements;
etype = TREE_TYPE (type);
- size = GET_MODE_SIZE (TYPE_MODE (etype));
+ size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
count = TYPE_VECTOR_SUBPARTS (type);
if (size * count > len)
return NULL_TREE;
- elements = XALLOCAVEC (tree, count);
- for (i = count - 1; i >= 0; i--)
+ auto_vec<tree, 32> elements (count);
+ for (i = 0; i < count; ++i)
{
elem = native_interpret_expr (etype, ptr+(i*size), size);
if (!elem)
return NULL_TREE;
- elements[i] = elem;
+ elements.quick_push (elem);
}
return build_vector (type, elements);
}
}
}
+/* Return true iff a STRING_CST EXPR is accepted by
+ native_encode_expr. */
+
+bool
+can_native_encode_string_p (const_tree expr)
+{
+ tree type = TREE_TYPE (expr);
+
+ /* Wide-char strings are encoded in target byte-order so encoding
+ them natively is trivial. */
+ if (BITS_PER_UNIT != CHAR_BIT
+ || TREE_CODE (type) != ARRAY_TYPE
+ || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
+ || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
+ return false;
+ return true;
+}
+
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
TYPE at compile-time. If we're unable to perform the conversion
return NULL_TREE. */
return tem;
if (LOGICAL_OP_NON_SHORT_CIRCUIT
+ && !flag_sanitize_coverage
&& (code == TRUTH_AND_EXPR
|| code == TRUTH_ANDIF_EXPR
|| code == TRUTH_OR_EXPR
{
/* We can fold this expression to a constant if the non-constant
offset parts are equal. */
- if ((offset0 == offset1
- || (offset0 && offset1
- && operand_equal_p (offset0, offset1, 0)))
- && (equality_code
- || (indirect_base0
- && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
- || POINTER_TYPE_OVERFLOW_UNDEFINED))
-
+ if (offset0 == offset1
+ || (offset0 && offset1
+ && operand_equal_p (offset0, offset1, 0)))
{
if (!equality_code
&& bitpos0 != bitpos1
because pointer arithmetic is restricted to remain within an
object and overflow on pointer differences is undefined as of
6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
- else if (bitpos0 == bitpos1
- && (equality_code
- || (indirect_base0
- && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
- || POINTER_TYPE_OVERFLOW_UNDEFINED))
+ else if (bitpos0 == bitpos1)
{
/* By converting to signed sizetype we cover middle-end pointer
arithmetic which operates on unsigned pointer types of size
/* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
- CONSTRUCTOR ARG into array ELTS and return true if successful. */
+ CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
+ true if successful. */
static bool
-vec_cst_ctor_to_array (tree arg, tree *elts)
+vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
{
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
+ unsigned int i;
if (TREE_CODE (arg) == VECTOR_CST)
{
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
- tree *elts;
bool need_ctor = false;
gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
|| TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
return NULL_TREE;
- elts = XALLOCAVEC (tree, nelts * 3);
- if (!vec_cst_ctor_to_array (arg0, elts)
- || !vec_cst_ctor_to_array (arg1, elts + nelts))
+ tree *in_elts = XALLOCAVEC (tree, nelts * 2);
+ if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
+ || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
return NULL_TREE;
+ auto_vec<tree, 32> out_elts (nelts);
for (i = 0; i < nelts; i++)
{
- if (!CONSTANT_CLASS_P (elts[sel[i]]))
+ if (!CONSTANT_CLASS_P (in_elts[sel[i]]))
need_ctor = true;
- elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
+ out_elts.quick_push (unshare_expr (in_elts[sel[i]]));
}
if (need_ctor)
vec<constructor_elt, va_gc> *v;
vec_alloc (v, nelts);
for (i = 0; i < nelts; i++)
- CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
+ CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
return build_constructor (type, v);
}
else
- return build_vector (type, &elts[2 * nelts]);
+ return build_vector (type, out_elts);
}
/* Try to fold a pointer difference of type TYPE between two address expressions of
exact_inverse (tree type, tree cst)
{
REAL_VALUE_TYPE r;
- tree unit_type, *elts;
+ tree unit_type;
machine_mode mode;
unsigned vec_nelts, i;
return NULL_TREE;
case VECTOR_CST:
- vec_nelts = VECTOR_CST_NELTS (cst);
- elts = XALLOCAVEC (tree, vec_nelts);
- unit_type = TREE_TYPE (type);
- mode = TYPE_MODE (unit_type);
+ {
+ vec_nelts = VECTOR_CST_NELTS (cst);
+ unit_type = TREE_TYPE (type);
+ mode = TYPE_MODE (unit_type);
- for (i = 0; i < vec_nelts; i++)
- {
- r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
- if (!exact_real_inverse (mode, &r))
- return NULL_TREE;
- elts[i] = build_real (unit_type, r);
- }
+ auto_vec<tree, 32> elts (vec_nelts);
+ for (i = 0; i < vec_nelts; i++)
+ {
+ r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
+ if (!exact_real_inverse (mode, &r))
+ return NULL_TREE;
+ elts.quick_push (build_real (unit_type, r));
+ }
- return build_vector (type, elts);
+ return build_vector (type, elts);
+ }
default:
return NULL_TREE;
if ((! FLOAT_TYPE_P (type) || flag_associative_math)
&& !TYPE_SATURATING (type))
{
- tree var0, con0, lit0, minus_lit0;
- tree var1, con1, lit1, minus_lit1;
+ tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
+ tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
tree atype = type;
bool ok = true;
then the result with variables. This increases the chances of
literals being recombined later and of generating relocatable
expressions for the sum of a constant and literal. */
- var0 = split_tree (loc, arg0, type, code,
- &con0, &lit0, &minus_lit0, 0);
- var1 = split_tree (loc, arg1, type, code,
- &con1, &lit1, &minus_lit1, code == MINUS_EXPR);
+ var0 = split_tree (arg0, type, code,
+ &minus_var0, &con0, &minus_con0,
+ &lit0, &minus_lit0, 0);
+ var1 = split_tree (arg1, type, code,
+ &minus_var1, &con1, &minus_con1,
+ &lit1, &minus_lit1, code == MINUS_EXPR);
/* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
if (code == MINUS_EXPR)
/* With undefined overflow prefer doing association in a type
which wraps on overflow, if that is one of the operand types. */
- if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
+ if (POINTER_TYPE_P (type)
|| (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
{
if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
/* With undefined overflow we can only associate constants with one
variable, and constants whose association doesn't overflow. */
- if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
+ if (POINTER_TYPE_P (atype)
|| (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
{
- if (var0 && var1)
+ if ((var0 && var1) || (minus_var0 && minus_var1))
{
- tree tmp0 = var0;
- tree tmp1 = var1;
+ /* ??? If split_tree would handle NEGATE_EXPR we could
+ simply reject these cases and the allowed cases would
+ be the var0/minus_var1 ones. */
+ tree tmp0 = var0 ? var0 : minus_var0;
+ tree tmp1 = var1 ? var1 : minus_var1;
bool one_neg = false;
if (TREE_CODE (tmp0) == NEGATE_EXPR)
|| !operand_equal_p (tmp0, tmp1, 0))
ok = false;
}
+ else if ((var0 && minus_var1
+ && ! operand_equal_p (var0, minus_var1, 0))
+ || (minus_var0 && var1
+ && ! operand_equal_p (minus_var0, var1, 0)))
+ ok = false;
}
/* Only do something if we found more than two objects. Otherwise,
nothing has changed and we risk infinite recursion. */
if (ok
&& (2 < ((var0 != 0) + (var1 != 0)
+ + (minus_var0 != 0) + (minus_var1 != 0)
+ (con0 != 0) + (con1 != 0)
+ + (minus_con0 != 0) + (minus_con1 != 0)
+ (lit0 != 0) + (lit1 != 0)
+ (minus_lit0 != 0) + (minus_lit1 != 0))))
{
var0 = associate_trees (loc, var0, var1, code, atype);
+ minus_var0 = associate_trees (loc, minus_var0, minus_var1,
+ code, atype);
con0 = associate_trees (loc, con0, con1, code, atype);
+ minus_con0 = associate_trees (loc, minus_con0, minus_con1,
+ code, atype);
lit0 = associate_trees (loc, lit0, lit1, code, atype);
minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
code, atype);
+ if (minus_var0 && var0)
+ {
+ var0 = associate_trees (loc, var0, minus_var0,
+ MINUS_EXPR, atype);
+ minus_var0 = 0;
+ }
+ if (minus_con0 && con0)
+ {
+ con0 = associate_trees (loc, con0, minus_con0,
+ MINUS_EXPR, atype);
+ minus_con0 = 0;
+ }
+
/* Preserve the MINUS_EXPR if the negative part of the literal is
greater than the positive part. Otherwise, the multiplicative
folding code (i.e. extract_muldiv) may be fooled in case
{
if (TREE_CODE (lit0) == INTEGER_CST
&& TREE_CODE (minus_lit0) == INTEGER_CST
- && tree_int_cst_lt (lit0, minus_lit0))
+ && tree_int_cst_lt (lit0, minus_lit0)
+ /* But avoid ending up with only negated parts. */
+ && (var0 || con0))
{
minus_lit0 = associate_trees (loc, minus_lit0, lit0,
MINUS_EXPR, atype);
|| (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
return NULL_TREE;
- if (minus_lit0)
+ /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
+ con0 = associate_trees (loc, con0, lit0, code, atype);
+ lit0 = 0;
+ minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
+ code, atype);
+ minus_lit0 = 0;
+
+ /* Eliminate minus_con0. */
+ if (minus_con0)
{
- if (con0 == 0)
- return
- fold_convert_loc (loc, type,
- associate_trees (loc, var0, minus_lit0,
- MINUS_EXPR, atype));
+ if (con0)
+ con0 = associate_trees (loc, con0, minus_con0,
+ MINUS_EXPR, atype);
+ else if (var0)
+ var0 = associate_trees (loc, var0, minus_con0,
+ MINUS_EXPR, atype);
else
- {
- con0 = associate_trees (loc, con0, minus_lit0,
- MINUS_EXPR, atype);
- return
- fold_convert_loc (loc, type,
- associate_trees (loc, var0, con0,
- PLUS_EXPR, atype));
- }
+ gcc_unreachable ();
+ minus_con0 = 0;
+ }
+
+ /* Eliminate minus_var0. */
+ if (minus_var0)
+ {
+ if (con0)
+ con0 = associate_trees (loc, con0, minus_var0,
+ MINUS_EXPR, atype);
+ else
+ gcc_unreachable ();
+ minus_var0 = 0;
}
- con0 = associate_trees (loc, con0, lit0, code, atype);
return
fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
code, atype));
&& (TREE_CODE (arg2) == VECTOR_CST
|| TREE_CODE (arg2) == CONSTRUCTOR))
{
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
+ unsigned int nelts = VECTOR_CST_NELTS (arg0), i;
unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
- gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
+ gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
for (i = 0; i < nelts; i++)
{
tree val = VECTOR_CST_ELT (arg0, i);
Also try swapping the arguments and inverting the conditional. */
if (COMPARISON_CLASS_P (arg0)
- && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
- arg1, TREE_OPERAND (arg0, 1))
+ && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), arg1)
&& !HONOR_SIGNED_ZEROS (element_mode (arg1)))
{
tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
}
if (COMPARISON_CLASS_P (arg0)
- && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
- op2,
- TREE_OPERAND (arg0, 1))
+ && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
&& !HONOR_SIGNED_ZEROS (element_mode (op2)))
{
location_t loc0 = expr_location_or (arg0, loc);
if (n == 1)
return VECTOR_CST_ELT (arg0, idx);
- tree *vals = XALLOCAVEC (tree, n);
+ auto_vec<tree, 32> vals (n);
for (unsigned i = 0; i < n; ++i)
- vals[i] = VECTOR_CST_ELT (arg0, idx + i);
+ vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
return build_vector (type, vals);
}
}
case VEC_PERM_EXPR:
if (TREE_CODE (arg2) == VECTOR_CST)
{
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
+ unsigned int nelts = VECTOR_CST_NELTS (arg2), i, mask, mask2;
unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
unsigned char *sel2 = sel + nelts;
bool need_mask_canon = false;
mask2 = 2 * nelts - 1;
mask = single_arg ? (nelts - 1) : mask2;
- gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
+ gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
for (i = 0; i < nelts; i++)
{
tree val = VECTOR_CST_ELT (arg2, i);
if (need_mask_canon && arg2 == op2)
{
- tree *tsel = XALLOCAVEC (tree, nelts);
tree eltype = TREE_TYPE (TREE_TYPE (arg2));
+ auto_vec<tree, 32> tsel (nelts);
for (i = 0; i < nelts; i++)
- tsel[i] = build_int_cst (eltype, sel[i]);
+ tsel.quick_push (build_int_cst (eltype, sel[i]));
op2 = build_vector (TREE_TYPE (arg2), tsel);
changed = true;
}
return arg0;
else
{
- tree *elts = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
- memcpy (elts, VECTOR_CST_ELTS (arg0),
- sizeof (tree) * TYPE_VECTOR_SUBPARTS (type));
+ unsigned int nelts = VECTOR_CST_NELTS (arg0);
+ auto_vec<tree, 32> elts (nelts);
+ elts.quick_grow (nelts);
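+ /* The slots added by quick_grow are uninitialized; the
+ memcpy below overwrites all of them. */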
+ memcpy (&elts[0], VECTOR_CST_ELTS (arg0),
+ sizeof (tree) * nelts);
elts[k] = arg1;
return build_vector (type, elts);
}
expression with code CODE of type TYPE with an operand OP0. */
tree
-fold_build1_stat_loc (location_t loc,
- enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
+fold_build1_loc (location_t loc,
+ enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
tree tem;
#ifdef ENABLE_FOLD_CHECKING
OP0 and OP1. */
tree
-fold_build2_stat_loc (location_t loc,
+fold_build2_loc (location_t loc,
enum tree_code code, tree type, tree op0, tree op1
MEM_STAT_DECL)
{
type TYPE with operands OP0, OP1, and OP2. */
tree
-fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
+fold_build3_loc (location_t loc, enum tree_code code, tree type,
tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
tree tem;
string = exp1;
}
+ scalar_int_mode char_mode;
if (string
&& TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
&& TREE_CODE (string) == STRING_CST
&& TREE_CODE (index) == INTEGER_CST
&& compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
- && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
- == MODE_INT)
- && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
+ && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
+ &char_mode)
+ && GET_MODE_SIZE (char_mode) == 1)
return build_int_cst_type (TREE_TYPE (exp),
(TREE_STRING_POINTER (string)
[TREE_INT_CST_LOW (index)]));
return constant_boolean_node (true, type);
}
unsigned count = VECTOR_CST_NELTS (op0);
- tree *elts = XALLOCAVEC (tree, count);
gcc_assert (VECTOR_CST_NELTS (op1) == count
&& TYPE_VECTOR_SUBPARTS (type) == count);
+ auto_vec<tree, 32> elts (count);
for (unsigned i = 0; i < count; i++)
{
tree elem_type = TREE_TYPE (type);
if (tem == NULL_TREE)
return NULL_TREE;
- elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
+ elts.quick_push (build_int_cst (elem_type,
+ integer_zerop (tem) ? 0 : -1));
}
return build_vector (type, elts);
&& type == TREE_TYPE (op00type))
{
tree type_domain = TYPE_DOMAIN (op00type);
- tree min_val = size_zero_node;
+ tree min = size_zero_node;
if (type_domain && TYPE_MIN_VALUE (type_domain))
- min_val = TYPE_MIN_VALUE (type_domain);
- op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
- TYPE_SIZE_UNIT (type));
- op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
- return build4_loc (loc, ARRAY_REF, type, op00, op01,
- NULL_TREE, NULL_TREE);
+ min = TYPE_MIN_VALUE (type_domain);
+ offset_int off = wi::to_offset (op01);
+ offset_int el_sz = wi::to_offset (TYPE_SIZE_UNIT (type));
+ offset_int remainder;
+ off = wi::divmod_trunc (off, el_sz, SIGNED, &remainder);
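+ /* Fold to an ARRAY_REF only when the byte offset is an exact
+ multiple of the element size; a nonzero remainder means the
+ address does not point at an element boundary. */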
+ if (remainder == 0 && TREE_CODE (min) == INTEGER_CST)
+ {
+ off = off + wi::to_offset (min);
+ op01 = wide_int_to_tree (sizetype, off);
+ return build4_loc (loc, ARRAY_REF, type, op00, op01,
+ NULL_TREE, NULL_TREE);
+ }
}
}
}