/* Fold a constant sub-tree into a single node for C-compiler
- Copyright (C) 1987-2016 Free Software Foundation, Inc.
+ Copyright (C) 1987-2017 Free Software Foundation, Inc.
This file is part of GCC.
#include "tree.h"
#include "gimple.h"
#include "predict.h"
+#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
+#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
-
-#ifndef LOAD_EXTEND_OP
-#define LOAD_EXTEND_OP(M) UNKNOWN
-#endif
+#include "stringpool.h"
+#include "attribs.h"
/* Nonzero if we are folding constants inside an initializer; zero
otherwise. */
static bool negate_expr_p (tree);
static tree negate_expr (tree);
-static tree split_tree (location_t, tree, tree, enum tree_code,
- tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
-static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
enum tree_code, tree,
tree, tree,
tree, tree, int);
-static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
-static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
-static bool vec_cst_ctor_to_array (tree, tree *);
+static tree fold_negate_expr (location_t, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
switch (TREE_CODE (t))
{
case INTEGER_CST:
- if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
+ if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
return true;
/* Check that -CST will not overflow type. */
if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
return true;
- int count = TYPE_VECTOR_SUBPARTS (type), i;
+ int count = VECTOR_CST_NELTS (t), i;
for (i = 0; i < count; i++)
if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
&& ! TYPE_OVERFLOW_WRAPS (type)))
return false;
/* -(A + B) -> (-B) - A. */
- if (negate_expr_p (TREE_OPERAND (t, 1))
- && reorder_operands_p (TREE_OPERAND (t, 0),
- TREE_OPERAND (t, 1)))
+ if (negate_expr_p (TREE_OPERAND (t, 1)))
return true;
/* -(A + B) -> (-A) - B. */
return negate_expr_p (TREE_OPERAND (t, 0));
return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
&& !HONOR_SIGNED_ZEROS (element_mode (type))
&& (! INTEGRAL_TYPE_P (type)
- || TYPE_OVERFLOW_WRAPS (type))
- && reorder_operands_p (TREE_OPERAND (t, 0),
- TREE_OPERAND (t, 1));
+ || TYPE_OVERFLOW_WRAPS (type));
case MULT_EXPR:
if (TYPE_UNSIGNED (type))
break;
/* INT_MIN/n * n doesn't overflow while negating one operand it does
- if n is a power of two. */
+ if n is a (negative) power of two. */
if (INTEGRAL_TYPE_P (TREE_TYPE (t))
&& ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
&& ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
- && ! integer_pow2p (TREE_OPERAND (t, 0)))
+ && wi::popcount (wi::abs (TREE_OPERAND (t, 0))) != 1)
|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
- && ! integer_pow2p (TREE_OPERAND (t, 1)))))
+ && wi::popcount (wi::abs (TREE_OPERAND (t, 1))) != 1)))
break;
/* Fall through. */
returned. */
static tree
-fold_negate_expr (location_t loc, tree t)
+fold_negate_expr_1 (location_t loc, tree t)
{
tree type = TREE_TYPE (t);
tree tem;
case BIT_NOT_EXPR:
if (INTEGRAL_TYPE_P (type))
return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
- build_one_cst (type));
+ build_one_cst (type));
break;
case INTEGER_CST:
case VECTOR_CST:
{
- int count = TYPE_VECTOR_SUBPARTS (type), i;
- tree *elts = XALLOCAVEC (tree, count);
+ int count = VECTOR_CST_NELTS (t), i;
+ auto_vec<tree, 32> elts (count);
for (i = 0; i < count; i++)
{
- elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
- if (elts[i] == NULL_TREE)
+ tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
+ if (elt == NULL_TREE)
return NULL_TREE;
+ elts.quick_push (elt);
}
return build_vector (type, elts);
case COMPLEX_EXPR:
if (negate_expr_p (t))
return fold_build2_loc (loc, COMPLEX_EXPR, type,
- fold_negate_expr (loc, TREE_OPERAND (t, 0)),
- fold_negate_expr (loc, TREE_OPERAND (t, 1)));
+ fold_negate_expr (loc, TREE_OPERAND (t, 0)),
+ fold_negate_expr (loc, TREE_OPERAND (t, 1)));
break;
case CONJ_EXPR:
if (negate_expr_p (t))
return fold_build1_loc (loc, CONJ_EXPR, type,
- fold_negate_expr (loc, TREE_OPERAND (t, 0)));
+ fold_negate_expr (loc, TREE_OPERAND (t, 0)));
break;
case NEGATE_EXPR:
&& !HONOR_SIGNED_ZEROS (element_mode (type)))
{
/* -(A + B) -> (-B) - A. */
- if (negate_expr_p (TREE_OPERAND (t, 1))
- && reorder_operands_p (TREE_OPERAND (t, 0),
- TREE_OPERAND (t, 1)))
+ if (negate_expr_p (TREE_OPERAND (t, 1)))
{
tem = negate_expr (TREE_OPERAND (t, 1));
return fold_build2_loc (loc, MINUS_EXPR, type,
- tem, TREE_OPERAND (t, 0));
+ tem, TREE_OPERAND (t, 0));
}
/* -(A + B) -> (-A) - B. */
{
tem = negate_expr (TREE_OPERAND (t, 0));
return fold_build2_loc (loc, MINUS_EXPR, type,
- tem, TREE_OPERAND (t, 1));
+ tem, TREE_OPERAND (t, 1));
}
}
break;
case MINUS_EXPR:
/* - (A - B) -> B - A */
if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
- && !HONOR_SIGNED_ZEROS (element_mode (type))
- && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
+ && !HONOR_SIGNED_ZEROS (element_mode (type)))
return fold_build2_loc (loc, MINUS_EXPR, type,
- TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
+ TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
break;
case MULT_EXPR:
tem = TREE_OPERAND (t, 1);
if (negate_expr_p (tem))
return fold_build2_loc (loc, TREE_CODE (t), type,
- TREE_OPERAND (t, 0), negate_expr (tem));
+ TREE_OPERAND (t, 0), negate_expr (tem));
tem = TREE_OPERAND (t, 0);
if (negate_expr_p (tem))
return fold_build2_loc (loc, TREE_CODE (t), type,
- negate_expr (tem), TREE_OPERAND (t, 1));
+ negate_expr (tem), TREE_OPERAND (t, 1));
}
break;
return NULL_TREE;
}
+/* A wrapper for fold_negate_expr_1: strip sign no-op conversions from
+ T, negate the stripped expression with fold_negate_expr_1, and
+ convert the result back to T's original type. Returns NULL_TREE
+ when T cannot be negated. */
+
+static tree
+fold_negate_expr (location_t loc, tree t)
+{
+ /* Remember the original type; STRIP_SIGN_NOPS may expose an inner
+ expression of a differently-signed type, and the caller expects a
+ result of TREE_TYPE (t). */
+ tree type = TREE_TYPE (t);
+ STRIP_SIGN_NOPS (t);
+ tree tem = fold_negate_expr_1 (loc, t);
+ if (tem == NULL_TREE)
+ return NULL_TREE;
+ return fold_convert_loc (loc, type, tem);
+}
+
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
negated in a simpler way. Also allow for T to be NULL_TREE, in which case
return NULL_TREE. */
same type as IN, but they will have the same signedness and mode. */
static tree
-split_tree (location_t loc, tree in, tree type, enum tree_code code,
- tree *conp, tree *litp, tree *minus_litp, int negate_p)
+split_tree (tree in, tree type, enum tree_code code,
+ tree *minus_varp, tree *conp, tree *minus_conp,
+ tree *litp, tree *minus_litp, int negate_p)
{
tree var = 0;
-
+ *minus_varp = 0;
*conp = 0;
+ *minus_conp = 0;
*litp = 0;
*minus_litp = 0;
though the C standard doesn't say so) for integers because
the value is not affected. For reals, the value might be
affected, so we can't. */
- && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
- || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
+ && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
+ || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
+ || (code == MINUS_EXPR
+ && (TREE_CODE (in) == PLUS_EXPR
+ || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
{
tree op0 = TREE_OPERAND (in, 0);
tree op1 = TREE_OPERAND (in, 1);
/* Now do any needed negations. */
if (neg_litp_p)
*minus_litp = *litp, *litp = 0;
- if (neg_conp_p)
- *conp = negate_expr (*conp);
+ if (neg_conp_p && *conp)
+ *minus_conp = *conp, *conp = 0;
if (neg_var_p && var)
- {
- /* Convert to TYPE before negating. */
- var = fold_convert_loc (loc, type, var);
- var = negate_expr (var);
- }
+ *minus_varp = var, var = 0;
}
+ else if (TREE_CONSTANT (in))
+ *conp = in;
else if (TREE_CODE (in) == BIT_NOT_EXPR
&& code == PLUS_EXPR)
{
- /* -X - 1 is folded to ~X, undo that here. */
- *minus_litp = build_one_cst (TREE_TYPE (in));
- var = negate_expr (TREE_OPERAND (in, 0));
+ /* -1 - X is folded to ~X, undo that here. Do _not_ do this
+ when IN is constant. */
+ *litp = build_minus_one_cst (type);
+ *minus_varp = TREE_OPERAND (in, 0);
}
- else if (TREE_CONSTANT (in))
- *conp = in;
else
var = in;
*minus_litp = *litp, *litp = 0;
else if (*minus_litp)
*litp = *minus_litp, *minus_litp = 0;
- *conp = negate_expr (*conp);
+ if (*conp)
+ *minus_conp = *conp, *conp = 0;
+ else if (*minus_conp)
+ *conp = *minus_conp, *minus_conp = 0;
if (var)
- {
- /* Convert to TYPE before negating. */
- var = fold_convert_loc (loc, type, var);
- var = negate_expr (var);
- }
+ *minus_varp = var, var = 0;
+ else if (*minus_varp)
+ var = *minus_varp, *minus_varp = 0;
}
+ if (*litp
+ && TREE_OVERFLOW_P (*litp))
+ *litp = drop_tree_overflow (*litp);
+ if (*minus_litp
+ && TREE_OVERFLOW_P (*minus_litp))
+ *minus_litp = drop_tree_overflow (*minus_litp);
+
return var;
}
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
if (t1 == 0)
- return t2;
+ {
+ gcc_assert (t2 == 0 || code != MINUS_EXPR);
+ return t2;
+ }
else if (t2 == 0)
return t1;
try to fold this since we will have infinite recursion. But do
deal with any NEGATE_EXPRs. */
if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
+ || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
|| TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
{
if (code == PLUS_EXPR)
return NULL_TREE;
wide_int w2 = arg2;
f2.data.high = w2.elt (1);
- f2.data.low = w2.elt (0);
+ f2.data.low = w2.ulow ();
f2.mode = SImode;
}
break;
return do_mpc_arg2 (arg1, arg2, type,
/* do_nonfinite= */ folding_initializer,
mpc_div);
- /* Fallthru ... */
+ /* Fallthru. */
case TRUNC_DIV_EXPR:
case CEIL_DIV_EXPR:
case FLOOR_DIV_EXPR:
&& TREE_CODE (arg2) == VECTOR_CST)
{
tree type = TREE_TYPE (arg1);
- int count = TYPE_VECTOR_SUBPARTS (type), i;
- tree *elts = XALLOCAVEC (tree, count);
+ int count = VECTOR_CST_NELTS (arg1), i;
+ auto_vec<tree, 32> elts (count);
for (i = 0; i < count; i++)
{
tree elem1 = VECTOR_CST_ELT (arg1, i);
tree elem2 = VECTOR_CST_ELT (arg2, i);
- elts[i] = const_binop (code, elem1, elem2);
+ tree elt = const_binop (code, elem1, elem2);
/* It is possible that const_binop cannot handle the given
code and return NULL_TREE */
- if (elts[i] == NULL_TREE)
+ if (elt == NULL_TREE)
return NULL_TREE;
+ elts.quick_push (elt);
}
return build_vector (type, elts);
&& TREE_CODE (arg2) == INTEGER_CST)
{
tree type = TREE_TYPE (arg1);
- int count = TYPE_VECTOR_SUBPARTS (type), i;
- tree *elts = XALLOCAVEC (tree, count);
+ int count = VECTOR_CST_NELTS (arg1), i;
+ auto_vec<tree, 32> elts (count);
for (i = 0; i < count; i++)
{
tree elem1 = VECTOR_CST_ELT (arg1, i);
- elts[i] = const_binop (code, elem1, arg2);
+ tree elt = const_binop (code, elem1, arg2);
/* It is possible that const_binop cannot handle the given
code and return NULL_TREE. */
- if (elts[i] == NULL_TREE)
+ if (elt == NULL_TREE)
return NULL_TREE;
+ elts.quick_push (elt);
}
return build_vector (type, elts);
case VEC_PACK_TRUNC_EXPR:
case VEC_PACK_FIX_TRUNC_EXPR:
{
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
- tree *elts;
+ unsigned int out_nelts, in_nelts, i;
- gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
- && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
if (TREE_CODE (arg1) != VECTOR_CST
|| TREE_CODE (arg2) != VECTOR_CST)
return NULL_TREE;
- elts = XALLOCAVEC (tree, nelts);
- if (!vec_cst_ctor_to_array (arg1, elts)
- || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
- return NULL_TREE;
+ in_nelts = VECTOR_CST_NELTS (arg1);
+ out_nelts = in_nelts * 2;
+ gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
+ && out_nelts == TYPE_VECTOR_SUBPARTS (type));
- for (i = 0; i < nelts; i++)
+ auto_vec<tree, 32> elts (out_nelts);
+ for (i = 0; i < out_nelts; i++)
{
- elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
- ? NOP_EXPR : FIX_TRUNC_EXPR,
- TREE_TYPE (type), elts[i]);
- if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
+ tree elt = (i < in_nelts
+ ? VECTOR_CST_ELT (arg1, i)
+ : VECTOR_CST_ELT (arg2, i - in_nelts));
+ elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
+ ? NOP_EXPR : FIX_TRUNC_EXPR,
+ TREE_TYPE (type), elt);
+ if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
return NULL_TREE;
+ elts.quick_push (elt);
}
return build_vector (type, elts);
case VEC_WIDEN_MULT_EVEN_EXPR:
case VEC_WIDEN_MULT_ODD_EXPR:
{
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
- unsigned int out, ofs, scale;
- tree *elts;
+ unsigned int out_nelts, in_nelts, out, ofs, scale;
- gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
- && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
return NULL_TREE;
- elts = XALLOCAVEC (tree, nelts * 4);
- if (!vec_cst_ctor_to_array (arg1, elts)
- || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
- return NULL_TREE;
+ in_nelts = VECTOR_CST_NELTS (arg1);
+ out_nelts = in_nelts / 2;
+ gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
+ && out_nelts == TYPE_VECTOR_SUBPARTS (type));
if (code == VEC_WIDEN_MULT_LO_EXPR)
- scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
+ scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
else if (code == VEC_WIDEN_MULT_HI_EXPR)
- scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
+ scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
scale = 1, ofs = 0;
else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
scale = 1, ofs = 1;
- for (out = 0; out < nelts; out++)
+ auto_vec<tree, 32> elts (out_nelts);
+ for (out = 0; out < out_nelts; out++)
{
- unsigned int in1 = (out << scale) + ofs;
- unsigned int in2 = in1 + nelts * 2;
- tree t1, t2;
-
- t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
- t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
+ unsigned int in = (out << scale) + ofs;
+ tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
+ VECTOR_CST_ELT (arg1, in));
+ tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
+ VECTOR_CST_ELT (arg2, in));
if (t1 == NULL_TREE || t2 == NULL_TREE)
return NULL_TREE;
- elts[out] = const_binop (MULT_EXPR, t1, t2);
- if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
+ tree elt = const_binop (MULT_EXPR, t1, t2);
+ if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
return NULL_TREE;
+ elts.quick_push (elt);
}
return build_vector (type, elts);
/* Perform BIT_NOT_EXPR on each element individually. */
else if (TREE_CODE (arg0) == VECTOR_CST)
{
- tree *elements;
tree elem;
unsigned count = VECTOR_CST_NELTS (arg0), i;
- elements = XALLOCAVEC (tree, count);
+ auto_vec<tree, 32> elements (count);
for (i = 0; i < count; i++)
{
elem = VECTOR_CST_ELT (arg0, i);
elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
if (elem == NULL_TREE)
break;
- elements[i] = elem;
+ elements.quick_push (elem);
}
if (i == count)
return build_vector (type, elements);
case VEC_UNPACK_FLOAT_LO_EXPR:
case VEC_UNPACK_FLOAT_HI_EXPR:
{
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
- tree *elts;
+ unsigned int out_nelts, in_nelts, i;
enum tree_code subcode;
- gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
if (TREE_CODE (arg0) != VECTOR_CST)
return NULL_TREE;
- elts = XALLOCAVEC (tree, nelts * 2);
- if (!vec_cst_ctor_to_array (arg0, elts))
- return NULL_TREE;
+ in_nelts = VECTOR_CST_NELTS (arg0);
+ out_nelts = in_nelts / 2;
+ gcc_assert (out_nelts == TYPE_VECTOR_SUBPARTS (type));
+ unsigned int offset = 0;
if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
|| code == VEC_UNPACK_FLOAT_LO_EXPR))
- elts += nelts;
+ offset = out_nelts;
if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
subcode = NOP_EXPR;
else
subcode = FLOAT_EXPR;
- for (i = 0; i < nelts; i++)
+ auto_vec<tree, 32> elts (out_nelts);
+ for (i = 0; i < out_nelts; i++)
{
- elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
- if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
+ tree elt = fold_convert_const (subcode, TREE_TYPE (type),
+ VECTOR_CST_ELT (arg0, i + offset));
+ if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
return NULL_TREE;
+ elts.quick_push (elt);
}
return build_vector (type, elts);
case REDUC_PLUS_EXPR:
{
unsigned int nelts, i;
- tree *elts;
enum tree_code subcode;
if (TREE_CODE (arg0) != VECTOR_CST)
return NULL_TREE;
- nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
-
- elts = XALLOCAVEC (tree, nelts);
- if (!vec_cst_ctor_to_array (arg0, elts))
- return NULL_TREE;
+ nelts = VECTOR_CST_NELTS (arg0);
switch (code)
{
default: gcc_unreachable ();
}
+ tree res = VECTOR_CST_ELT (arg0, 0);
for (i = 1; i < nelts; i++)
{
- elts[0] = const_binop (subcode, elts[0], elts[i]);
- if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
+ res = const_binop (subcode, res, VECTOR_CST_ELT (arg0, i));
+ if (res == NULL_TREE || !CONSTANT_CLASS_P (res))
return NULL_TREE;
}
- return elts[0];
+ return res;
}
default:
{
tree t;
double_int temp, temp_trunc;
- unsigned int mode;
+ scalar_mode mode;
/* Right shift FIXED_CST to temp by fbit. */
temp = TREE_FIXED_CST (arg1).data;
REAL_VALUE_TYPE value;
tree t;
- real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
+ real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
+ &TREE_FIXED_CST (arg1));
t = build_real (type, value);
TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
tree t;
bool overflow_p;
- overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
- TYPE_SATURATING (type));
+ overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
+ &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
t = build_fixed (type, value);
/* Propagate overflow flags. */
else
di.high = TREE_INT_CST_ELT (arg1, 1);
- overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
+ overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
TYPE_UNSIGNED (TREE_TYPE (arg1)),
TYPE_SATURATING (type));
t = build_fixed (type, value);
tree t;
bool overflow_p;
- overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
+ overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
&TREE_REAL_CST (arg1),
TYPE_SATURATING (type));
t = build_fixed (type, value);
if (TREE_CODE (arg1) == VECTOR_CST
&& TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
{
- int len = TYPE_VECTOR_SUBPARTS (type);
+ int len = VECTOR_CST_NELTS (arg1);
tree elttype = TREE_TYPE (type);
- tree *v = XALLOCAVEC (tree, len);
+ auto_vec<tree, 32> v (len);
for (int i = 0; i < len; ++i)
{
tree elt = VECTOR_CST_ELT (arg1, i);
tree cvt = fold_convert_const (code, elttype, elt);
if (cvt == NULL_TREE)
return NULL_TREE;
- v[i] = cvt;
+ v.quick_push (cvt);
}
return build_vector (type, v);
}
If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
not values of expressions.
+ If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
+ such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
+
Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
any operand with side effect. This is unnecesarily conservative in the
case we know that arg0 and arg1 are in disjoint code paths (such as in
case CONSTRUCTOR:
/* In GIMPLE empty constructors are allowed in initializers of
aggregates. */
- return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
- && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
+ return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
default:
break;
}
TREE_OPERAND (arg1, 0), flags));
case COND_EXPR:
- if (! OP_SAME (1) || ! OP_SAME (2))
+ if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
return 0;
flags &= ~OEP_ADDRESS_OF;
return OP_SAME (0);
+ case BIT_INSERT_EXPR:
+ /* BIT_INSERT_EXPR has an implicit operand as the type precision
+ of op1. Need to check to make sure they are the same. */
+ if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+ && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
+ && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
+ != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
+ return false;
+ /* FALLTHRU */
+
case VEC_COND_EXPR:
case DOT_PROD_EXPR:
- case BIT_INSERT_EXPR:
return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
+ case MODIFY_EXPR:
+ case INIT_EXPR:
+ case COMPOUND_EXPR:
+ case PREDECREMENT_EXPR:
+ case PREINCREMENT_EXPR:
+ case POSTDECREMENT_EXPR:
+ case POSTINCREMENT_EXPR:
+ if (flags & OEP_LEXICOGRAPHIC)
+ return OP_SAME (0) && OP_SAME (1);
+ return 0;
+
+ case CLEANUP_POINT_EXPR:
+ case EXPR_STMT:
+ if (flags & OEP_LEXICOGRAPHIC)
+ return OP_SAME (0);
+ return 0;
+
default:
return 0;
}
cef &= ECF_CONST | ECF_PURE;
else
cef &= ECF_CONST;
- if (!cef)
+ if (!cef && !(flags & OEP_LEXICOGRAPHIC))
return 0;
}
}
return 1;
}
+ else if (TREE_CODE (arg0) == STATEMENT_LIST
+ && (flags & OEP_LEXICOGRAPHIC))
+ {
+ /* Compare the STATEMENT_LISTs. */
+ tree_stmt_iterator tsi1, tsi2;
+ tree body1 = CONST_CAST_TREE (arg0);
+ tree body2 = CONST_CAST_TREE (arg1);
+ for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
+ tsi_next (&tsi1), tsi_next (&tsi2))
+ {
+ /* The lists don't have the same number of statements. */
+ if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
+ return 0;
+ if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
+ return 1;
+ if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
+ OEP_LEXICOGRAPHIC))
+ return 0;
+ }
+ }
return 0;
+ case tcc_statement:
+ switch (TREE_CODE (arg0))
+ {
+ case RETURN_EXPR:
+ if (flags & OEP_LEXICOGRAPHIC)
+ return OP_SAME_WITH_NULL (0);
+ return 0;
+ default:
+ return 0;
+ }
+
default:
return 0;
}
#undef OP_SAME_WITH_NULL
}
\f
-/* Similar to operand_equal_p, but see if ARG0 might have been made by
- shorten_compare from ARG1 when ARG1 was being compared with OTHER.
-
- When in doubt, return 0. */
+/* Similar to operand_equal_p, but strip nops first. */
-static int
-operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
+static bool
+operand_equal_for_comparison_p (tree arg0, tree arg1)
{
- int unsignedp1, unsignedpo;
- tree primarg0, primarg1, primother;
- unsigned int correct_width;
-
if (operand_equal_p (arg0, arg1, 0))
- return 1;
+ return true;
if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
|| ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
- return 0;
+ return false;
/* Discard any conversions that don't change the modes of ARG0 and ARG1
and see if the inner values are the same. This removes any
signedness comparison, which doesn't matter here. */
- primarg0 = arg0, primarg1 = arg1;
- STRIP_NOPS (primarg0);
- STRIP_NOPS (primarg1);
- if (operand_equal_p (primarg0, primarg1, 0))
- return 1;
-
- /* Duplicate what shorten_compare does to ARG1 and see if that gives the
- actual comparison operand, ARG0.
-
- First throw away any conversions to wider types
- already present in the operands. */
-
- primarg1 = get_narrower (arg1, &unsignedp1);
- primother = get_narrower (other, &unsignedpo);
-
- correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
- if (unsignedp1 == unsignedpo
- && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
- && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
- {
- tree type = TREE_TYPE (arg0);
-
- /* Make sure shorter operand is extended the right way
- to match the longer operand. */
- primarg1 = fold_convert (signed_or_unsigned_type_for
- (unsignedp1, TREE_TYPE (primarg1)), primarg1);
-
- if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
- return 1;
- }
+ STRIP_NOPS (arg0);
+ STRIP_NOPS (arg1);
+ if (operand_equal_p (arg0, arg1, 0))
+ return true;
- return 0;
+ return false;
}
\f
/* See if ARG is an expression that is either a comparison or is performing
if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
- /* ... fall through ... */
+ /* fall through */
case FLOAT_EXPR:
loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
{
tree result, bftype;
- if (get_alias_set (inner) != get_alias_set (orig_inner))
+ /* Attempt not to lose the access path if possible. */
+ if (TREE_CODE (orig_inner) == COMPONENT_REF)
+ {
+ tree ninner = TREE_OPERAND (orig_inner, 0);
+ machine_mode nmode;
+ HOST_WIDE_INT nbitsize, nbitpos;
+ tree noffset;
+ int nunsignedp, nreversep, nvolatilep = 0;
+ tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
+ &noffset, &nmode, &nunsignedp,
+ &nreversep, &nvolatilep);
+ if (base == inner
+ && noffset == NULL_TREE
+ && nbitsize >= bitsize
+ && nbitpos <= bitpos
+ && bitpos + bitsize <= nbitpos + nbitsize
+ && !reversep
+ && !nreversep
+ && !nvolatilep)
+ {
+ inner = ninner;
+ bitpos -= nbitpos;
+ }
+ }
+
+ alias_set_type iset = get_alias_set (orig_inner);
+ if (iset == 0 && get_alias_set (inner) != iset)
inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
build_fold_addr_expr (inner),
- build_int_cst
- (reference_alias_ptr_type (orig_inner), 0));
+ build_int_cst (ptr_type_node, 0));
if (bitpos == 0 && !reversep)
{
bftype = build_nonstandard_integer_type (bitsize, 0);
result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
- size_int (bitsize), bitsize_int (bitpos));
+ bitsize_int (bitsize), bitsize_int (bitpos));
REF_REVERSE_STORAGE_ORDER (result) = reversep;
if (bftype != type)
tree type = TREE_TYPE (lhs);
tree unsigned_type;
int const_p = TREE_CODE (rhs) == INTEGER_CST;
- machine_mode lmode, rmode, nmode;
+ machine_mode lmode, rmode;
+ scalar_int_mode nmode;
int lunsignedp, runsignedp;
int lreversep, rreversep;
int lvolatilep = 0, rvolatilep = 0;
/* See if we can find a mode to refer to this field. We should be able to,
but fail if we can't. */
- nmode = get_best_mode (lbitsize, lbitpos, bitstart, bitend,
- const_p ? TYPE_ALIGN (TREE_TYPE (linner))
- : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
- TYPE_ALIGN (TREE_TYPE (rinner))),
- word_mode, false);
- if (nmode == VOIDmode)
+ if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
+ const_p ? TYPE_ALIGN (TREE_TYPE (linner))
+ : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
+ TYPE_ALIGN (TREE_TYPE (rinner))),
+ BITS_PER_WORD, false, &nmode))
return 0;
/* Set signed and unsigned types of the precision of this mode for the
punsignedp, preversep, pvolatilep);
if ((inner == exp && and_mask == 0)
|| *pbitsize < 0 || offset != 0
- || TREE_CODE (inner) == PLACEHOLDER_EXPR)
+ || TREE_CODE (inner) == PLACEHOLDER_EXPR
+ /* Reject out-of-bound accesses (PR79731). */
+ || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
+ && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
+ *pbitpos + *pbitsize) < 0))
return 0;
*exp_ = exp;
*pin_p = in_p, *plow = low, *phigh = high;
return exp;
}
+
+/* Returns TRUE if [LOW, HIGH] range check can be optimized to
+ a bitwise check, i.e. when
+ LOW == 0xXX...X00...0
+ HIGH == 0xXX...X11...1
+ Return corresponding mask in MASK and stem in VALUE. */
+
+static bool
+maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
+ tree *value)
+{
+ if (TREE_CODE (low) != INTEGER_CST
+ || TREE_CODE (high) != INTEGER_CST)
+ return false;
+
+ unsigned prec = TYPE_PRECISION (type);
+ wide_int lo = wi::to_wide (low, prec);
+ wide_int hi = wi::to_wide (high, prec);
+
+ wide_int end_mask = lo ^ hi;
+ if ((end_mask & (end_mask + 1)) != 0
+ || (lo & end_mask) != 0)
+ return false;
+
+ wide_int stem_mask = ~end_mask;
+ wide_int stem = lo & stem_mask;
+ if (stem != (hi & stem_mask))
+ return false;
+
+ *mask = wide_int_to_tree (type, stem_mask);
+ *value = wide_int_to_tree (type, stem);
+
+ return true;
+}
\f
+/* Helper routine for build_range_check and match.pd. Return the type to
+ perform the check or NULL if it shouldn't be optimized. */
+
+tree
+range_check_type (tree etype)
+{
+ /* First make sure that arithmetics in this type is valid, then make sure
+ that it wraps around. */
+ if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
+ etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
+ TYPE_UNSIGNED (etype));
+
+ if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
+ {
+ tree utype, minv, maxv;
+
+ /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
+ for the type in question, as we rely on this here. */
+ utype = unsigned_type_for (etype);
+ maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
+ maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
+ build_int_cst (TREE_TYPE (maxv), 1), 1);
+ minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
+
+ if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
+ minv, 1, maxv, 1)))
+ etype = utype;
+ else
+ return NULL_TREE;
+ }
+ return etype;
+}
+
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
type, TYPE, return an expression to test if EXP is in (or out of, depending
on IN_P) the range. Return 0 if the test couldn't be created. */
build_range_check (location_t loc, tree type, tree exp, int in_p,
tree low, tree high)
{
- tree etype = TREE_TYPE (exp), value;
+ tree etype = TREE_TYPE (exp), mask, value;
/* Disable this optimization for function pointer expressions
on targets that require function pointer canonicalization. */
if (low == 0)
return fold_build2_loc (loc, LE_EXPR, type, exp,
- fold_convert_loc (loc, etype, high));
+ fold_convert_loc (loc, etype, high));
if (high == 0)
return fold_build2_loc (loc, GE_EXPR, type, exp,
- fold_convert_loc (loc, etype, low));
+ fold_convert_loc (loc, etype, low));
if (operand_equal_p (low, high, 0))
return fold_build2_loc (loc, EQ_EXPR, type, exp,
- fold_convert_loc (loc, etype, low));
+ fold_convert_loc (loc, etype, low));
+
+ if (TREE_CODE (exp) == BIT_AND_EXPR
+ && maskable_range_p (low, high, etype, &mask, &value))
+ return fold_build2_loc (loc, EQ_EXPR, type,
+ fold_build2_loc (loc, BIT_AND_EXPR, etype,
+ exp, mask),
+ value);
if (integer_zerop (low))
{
exp = fold_convert_loc (loc, etype, exp);
}
return fold_build2_loc (loc, GT_EXPR, type, exp,
- build_int_cst (etype, 0));
+ build_int_cst (etype, 0));
}
}
/* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
- This requires wrap-around arithmetics for the type of the expression.
- First make sure that arithmetics in this type is valid, then make sure
- that it wraps around. */
- if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
- etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
- TYPE_UNSIGNED (etype));
-
- if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
- {
- tree utype, minv, maxv;
-
- /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
- for the type in question, as we rely on this here. */
- utype = unsigned_type_for (etype);
- maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
- maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
- build_int_cst (TREE_TYPE (maxv), 1), 1);
- minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
+ This requires wrap-around arithmetics for the type of the expression. */
+ etype = range_check_type (etype);
+ if (etype == NULL_TREE)
+ return NULL_TREE;
- if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
- minv, 1, maxv, 1)))
- etype = utype;
- else
- return 0;
- }
+ if (POINTER_TYPE_P (etype))
+ etype = unsigned_type_for (etype);
high = fold_convert_loc (loc, etype, high);
low = fold_convert_loc (loc, etype, low);
value = const_binop (MINUS_EXPR, high, low);
-
- if (POINTER_TYPE_P (etype))
- {
- if (value != 0 && !TREE_OVERFLOW (value))
- {
- low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
- return build_range_check (loc, type,
- fold_build_pointer_plus_loc (loc, exp, low),
- 1, build_int_cst (etype, 0), value);
- }
- return 0;
- }
-
if (value != 0 && !TREE_OVERFLOW (value))
return build_range_check (loc, type,
fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
case EQ_EXPR:
case UNEQ_EXPR:
tem = fold_convert_loc (loc, arg1_type, arg1);
- return pedantic_non_lvalue_loc (loc,
- fold_convert_loc (loc, type,
- negate_expr (tem)));
+ return fold_convert_loc (loc, type, negate_expr (tem));
case NE_EXPR:
case LTGT_EXPR:
- return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
+ return fold_convert_loc (loc, type, arg1);
case UNGE_EXPR:
case UNGT_EXPR:
if (flag_trapping_math)
if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
break;
tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
- return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
+ return fold_convert_loc (loc, type, tem);
case UNLE_EXPR:
case UNLT_EXPR:
if (flag_trapping_math)
break;
+ /* FALLTHRU */
case LE_EXPR:
case LT_EXPR:
if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
&& integer_zerop (arg01) && integer_zerop (arg2))
{
if (comp_code == NE_EXPR)
- return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
+ return fold_convert_loc (loc, type, arg1);
else if (comp_code == EQ_EXPR)
return build_zero_cst (type);
}
expressions will be false, so all four give B. The min()
and max() versions would give a NaN instead. */
if (!HONOR_SIGNED_ZEROS (element_mode (type))
- && operand_equal_for_comparison_p (arg01, arg2, arg00)
+ && operand_equal_for_comparison_p (arg01, arg2)
/* Avoid these transformations if the COND_EXPR may be used
as an lvalue in the C++ front-end. PR c++/19199. */
&& (in_gimple_form
tree comp_op1 = arg01;
tree comp_type = TREE_TYPE (comp_op0);
- /* Avoid adding NOP_EXPRs in case this is an lvalue. */
- if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
- {
- comp_type = type;
- comp_op0 = arg1;
- comp_op1 = arg2;
- }
-
switch (comp_code)
{
case EQ_EXPR:
- return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
+ return fold_convert_loc (loc, type, arg2);
case NE_EXPR:
- return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
+ return fold_convert_loc (loc, type, arg1);
case LE_EXPR:
case LT_EXPR:
case UNLE_EXPR:
? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
: fold_build2_loc (loc, MIN_EXPR, comp_type,
comp_op1, comp_op0);
- return pedantic_non_lvalue_loc (loc,
- fold_convert_loc (loc, type, tem));
+ return fold_convert_loc (loc, type, tem);
}
break;
case GE_EXPR:
? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
: fold_build2_loc (loc, MAX_EXPR, comp_type,
comp_op1, comp_op0);
- return pedantic_non_lvalue_loc (loc,
- fold_convert_loc (loc, type, tem));
+ return fold_convert_loc (loc, type, tem);
}
break;
case UNEQ_EXPR:
if (!HONOR_NANS (arg1))
- return pedantic_non_lvalue_loc (loc,
- fold_convert_loc (loc, type, arg2));
+ return fold_convert_loc (loc, type, arg2);
break;
case LTGT_EXPR:
if (!HONOR_NANS (arg1))
- return pedantic_non_lvalue_loc (loc,
- fold_convert_loc (loc, type, arg1));
+ return fold_convert_loc (loc, type, arg1);
break;
default:
gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
}
}
- /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
- we might still be able to simplify this. For example,
- if C1 is one less or one more than C2, this might have started
- out as a MIN or MAX and been transformed by this function.
- Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
-
- if (INTEGRAL_TYPE_P (type)
- && TREE_CODE (arg01) == INTEGER_CST
- && TREE_CODE (arg2) == INTEGER_CST)
- switch (comp_code)
- {
- case EQ_EXPR:
- if (TREE_CODE (arg1) == INTEGER_CST)
- break;
- /* We can replace A with C1 in this case. */
- arg1 = fold_convert_loc (loc, type, arg01);
- return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
-
- case LT_EXPR:
- /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
- MIN_EXPR, to preserve the signedness of the comparison. */
- if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
- OEP_ONLY_CONST)
- && operand_equal_p (arg01,
- const_binop (PLUS_EXPR, arg2,
- build_int_cst (type, 1)),
- OEP_ONLY_CONST))
- {
- tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
- fold_convert_loc (loc, TREE_TYPE (arg00),
- arg2));
- return pedantic_non_lvalue_loc (loc,
- fold_convert_loc (loc, type, tem));
- }
- break;
-
- case LE_EXPR:
- /* If C1 is C2 - 1, this is min(A, C2), with the same care
- as above. */
- if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
- OEP_ONLY_CONST)
- && operand_equal_p (arg01,
- const_binop (MINUS_EXPR, arg2,
- build_int_cst (type, 1)),
- OEP_ONLY_CONST))
- {
- tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
- fold_convert_loc (loc, TREE_TYPE (arg00),
- arg2));
- return pedantic_non_lvalue_loc (loc,
- fold_convert_loc (loc, type, tem));
- }
- break;
-
- case GT_EXPR:
- /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
- MAX_EXPR, to preserve the signedness of the comparison. */
- if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
- OEP_ONLY_CONST)
- && operand_equal_p (arg01,
- const_binop (MINUS_EXPR, arg2,
- build_int_cst (type, 1)),
- OEP_ONLY_CONST))
- {
- tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
- fold_convert_loc (loc, TREE_TYPE (arg00),
- arg2));
- return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
- }
- break;
-
- case GE_EXPR:
- /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
- if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
- OEP_ONLY_CONST)
- && operand_equal_p (arg01,
- const_binop (PLUS_EXPR, arg2,
- build_int_cst (type, 1)),
- OEP_ONLY_CONST))
- {
- tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
- fold_convert_loc (loc, TREE_TYPE (arg00),
- arg2));
- return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
- }
- break;
- case NE_EXPR:
- break;
- default:
- gcc_unreachable ();
- }
-
return NULL_TREE;
}
short-circuited branch and the underlying object on both sides
is the same, make a non-short-circuit operation. */
else if (LOGICAL_OP_NON_SHORT_CIRCUIT
+ && !flag_sanitize_coverage
&& lhs != 0 && rhs != 0
&& (code == TRUTH_ANDIF_EXPR
|| code == TRUTH_ORIF_EXPR)
unextend (tree c, int p, int unsignedp, tree mask)
{
tree type = TREE_TYPE (c);
- int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
+ int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
tree temp;
if (p == modesize || unsignedp)
int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
- machine_mode lnmode, rnmode;
+ scalar_int_mode lnmode, rnmode;
tree ll_mask, lr_mask, rl_mask, rr_mask;
tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
tree l_const, r_const;
to be relative to a field of that size. */
first_bit = MIN (ll_bitpos, rl_bitpos);
end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
- lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
- TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
- volatilep);
- if (lnmode == VOIDmode)
+ if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
+ TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
+ volatilep, &lnmode))
return 0;
lnbitsize = GET_MODE_BITSIZE (lnmode);
first_bit = MIN (lr_bitpos, rr_bitpos);
end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
- rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
- TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
- volatilep);
- if (rnmode == VOIDmode)
+ if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
+ TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
+ volatilep, &rnmode))
return 0;
rnbitsize = GET_MODE_BITSIZE (rnmode);
{
tree type = TREE_TYPE (t);
enum tree_code tcode = TREE_CODE (t);
- tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
- > GET_MODE_SIZE (TYPE_MODE (type)))
+ tree ctype = (wide_type != 0
+ && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
+ > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
? wide_type : type);
tree t1, t2;
int same_p = tcode == code;
t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
if (t1 != 0 && t2 != 0
+ && TYPE_OVERFLOW_WRAPS (ctype)
&& (code == MULT_EXPR
/* If not multiplication, we can only do this if both operands
are divisible by c. */
if (TYPE_UNSIGNED (ctype) && ctype != type)
break;
- /* If we were able to eliminate our operation from the first side,
- apply our operation to the second side and reform the PLUS. */
- if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
- return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
-
/* The last case is if we are a multiply. In that case, we can
apply the distributive law to commute the multiply and addition
if the multiplication of the constants doesn't overflow
new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
do something only if the second operand is a constant. */
if (same_p
+ && TYPE_OVERFLOW_WRAPS (ctype)
&& (t1 = extract_muldiv (op0, c, code, wide_type,
strict_overflow_p)) != 0)
return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
fold_convert (ctype, op1));
else if (tcode == MULT_EXPR && code == MULT_EXPR
+ && TYPE_OVERFLOW_WRAPS (ctype)
&& (t1 = extract_muldiv (op1, c, code, wide_type,
strict_overflow_p)) != 0)
return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
}
-/* Subroutine of fold() that optimizes comparisons of a division by
+/* Subroutine of match.pd that optimizes comparisons of a division by
a nonzero integer constant against an integer constant, i.e.
X/C1 op C2.
CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
- GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
- are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
-
- The function returns the constant folded tree if a simplification
- can be made, and NULL_TREE otherwise. */
+ GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
-static tree
-fold_div_compare (location_t loc,
- enum tree_code code, tree type, tree arg0, tree arg1)
+enum tree_code
+fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
+ tree *hi, bool *neg_overflow)
{
- tree prod, tmp, hi, lo;
- tree arg00 = TREE_OPERAND (arg0, 0);
- tree arg01 = TREE_OPERAND (arg0, 1);
- signop sign = TYPE_SIGN (TREE_TYPE (arg0));
- bool neg_overflow = false;
+ tree prod, tmp, type = TREE_TYPE (c1);
+ signop sign = TYPE_SIGN (type);
bool overflow;
/* We have to do this the hard way to detect unsigned overflow.
- prod = int_const_binop (MULT_EXPR, arg01, arg1); */
- wide_int val = wi::mul (arg01, arg1, sign, &overflow);
- prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
- neg_overflow = false;
+ prod = int_const_binop (MULT_EXPR, c1, c2); */
+ wide_int val = wi::mul (c1, c2, sign, &overflow);
+ prod = force_fit_type (type, val, -1, overflow);
+ *neg_overflow = false;
if (sign == UNSIGNED)
{
- tmp = int_const_binop (MINUS_EXPR, arg01,
- build_int_cst (TREE_TYPE (arg01), 1));
- lo = prod;
+ tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
+ *lo = prod;
- /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
+ /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
val = wi::add (prod, tmp, sign, &overflow);
- hi = force_fit_type (TREE_TYPE (arg00), val,
- -1, overflow | TREE_OVERFLOW (prod));
+ *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
}
- else if (tree_int_cst_sgn (arg01) >= 0)
+ else if (tree_int_cst_sgn (c1) >= 0)
{
- tmp = int_const_binop (MINUS_EXPR, arg01,
- build_int_cst (TREE_TYPE (arg01), 1));
- switch (tree_int_cst_sgn (arg1))
+ tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
+ switch (tree_int_cst_sgn (c2))
{
case -1:
- neg_overflow = true;
- lo = int_const_binop (MINUS_EXPR, prod, tmp);
- hi = prod;
+ *neg_overflow = true;
+ *lo = int_const_binop (MINUS_EXPR, prod, tmp);
+ *hi = prod;
break;
- case 0:
- lo = fold_negate_const (tmp, TREE_TYPE (arg0));
- hi = tmp;
+ case 0:
+ *lo = fold_negate_const (tmp, type);
+ *hi = tmp;
break;
- case 1:
- hi = int_const_binop (PLUS_EXPR, prod, tmp);
- lo = prod;
+ case 1:
+ *hi = int_const_binop (PLUS_EXPR, prod, tmp);
+ *lo = prod;
break;
default:
/* A negative divisor reverses the relational operators. */
code = swap_tree_comparison (code);
- tmp = int_const_binop (PLUS_EXPR, arg01,
- build_int_cst (TREE_TYPE (arg01), 1));
- switch (tree_int_cst_sgn (arg1))
+ tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
+ switch (tree_int_cst_sgn (c2))
{
case -1:
- hi = int_const_binop (MINUS_EXPR, prod, tmp);
- lo = prod;
+ *hi = int_const_binop (MINUS_EXPR, prod, tmp);
+ *lo = prod;
break;
- case 0:
- hi = fold_negate_const (tmp, TREE_TYPE (arg0));
- lo = tmp;
+ case 0:
+ *hi = fold_negate_const (tmp, type);
+ *lo = tmp;
break;
- case 1:
- neg_overflow = true;
- lo = int_const_binop (PLUS_EXPR, prod, tmp);
- hi = prod;
+ case 1:
+ *neg_overflow = true;
+ *lo = int_const_binop (PLUS_EXPR, prod, tmp);
+ *hi = prod;
break;
default:
}
}
- switch (code)
- {
- case EQ_EXPR:
- if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
- return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
- if (TREE_OVERFLOW (hi))
- return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
- if (TREE_OVERFLOW (lo))
- return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
- return build_range_check (loc, type, arg00, 1, lo, hi);
-
- case NE_EXPR:
- if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
- return omit_one_operand_loc (loc, type, integer_one_node, arg00);
- if (TREE_OVERFLOW (hi))
- return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
- if (TREE_OVERFLOW (lo))
- return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
- return build_range_check (loc, type, arg00, 0, lo, hi);
-
- case LT_EXPR:
- if (TREE_OVERFLOW (lo))
- {
- tmp = neg_overflow ? integer_zero_node : integer_one_node;
- return omit_one_operand_loc (loc, type, tmp, arg00);
- }
- return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
-
- case LE_EXPR:
- if (TREE_OVERFLOW (hi))
- {
- tmp = neg_overflow ? integer_zero_node : integer_one_node;
- return omit_one_operand_loc (loc, type, tmp, arg00);
- }
- return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
+ if (code != EQ_EXPR && code != NE_EXPR)
+ return code;
- case GT_EXPR:
- if (TREE_OVERFLOW (hi))
- {
- tmp = neg_overflow ? integer_one_node : integer_zero_node;
- return omit_one_operand_loc (loc, type, tmp, arg00);
- }
- return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
+ if (TREE_OVERFLOW (*lo)
+ || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
+ *lo = NULL_TREE;
+ if (TREE_OVERFLOW (*hi)
+ || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
+ *hi = NULL_TREE;
- case GE_EXPR:
- if (TREE_OVERFLOW (lo))
- {
- tmp = neg_overflow ? integer_one_node : integer_zero_node;
- return omit_one_operand_loc (loc, type, tmp, arg00);
- }
- return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
-
- default:
- break;
- }
-
- return NULL_TREE;
+ return code;
}
if (arg00 != NULL_TREE
/* This is only a win if casting to a signed type is cheap,
i.e. when arg00's type is not a partial mode. */
- && TYPE_PRECISION (TREE_TYPE (arg00))
- == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
+ && type_has_mode_precision_p (TREE_TYPE (arg00)))
{
tree stype = signed_type_for (TREE_TYPE (arg00));
return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
tree inner = TREE_OPERAND (arg0, 0);
tree type = TREE_TYPE (arg0);
int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
- machine_mode operand_mode = TYPE_MODE (type);
+ scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
int ops_unsigned;
tree signed_type, unsigned_type, intermediate_type;
tree tem, one;
/* If we are going to be able to omit the AND below, we must do our
operations as unsigned. If we must use the AND, we have a choice.
Normally unsigned is faster, but for some machines signed is. */
- ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
+ ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
&& !flag_syntax_only) ? 0 : 1;
signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
return NULL_TREE;
}
-/* Check whether we are allowed to reorder operands arg0 and arg1,
- such that the evaluation of arg1 occurs before arg0. */
-
-static bool
-reorder_operands_p (const_tree arg0, const_tree arg1)
-{
- if (! flag_evaluation_order)
- return true;
- if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
- return true;
- return ! TREE_SIDE_EFFECTS (arg0)
- && ! TREE_SIDE_EFFECTS (arg1);
-}
-
/* Test whether it is preferable two swap two operands, ARG0 and
ARG1, for example because ARG0 is an integer constant and ARG1
- isn't. If REORDER is true, only recommend swapping if we can
- evaluate the operands in reverse order. */
+ isn't. */
bool
-tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
+tree_swap_operands_p (const_tree arg0, const_tree arg1)
{
if (CONSTANT_CLASS_P (arg1))
return 0;
if (TREE_CONSTANT (arg0))
return 1;
- if (reorder && flag_evaluation_order
- && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
- return 0;
-
/* It is preferable to swap two SSA_NAME to ensure a canonical form
for commutative and comparison operators. Ensuring a canonical
form allows the optimizers to find additional redundancies without
}
same = NULL_TREE;
- if (operand_equal_p (arg01, arg11, 0))
- same = arg01, alt0 = arg00, alt1 = arg10;
- else if (operand_equal_p (arg00, arg10, 0))
+ /* Prefer factoring a common non-constant. */
+ if (operand_equal_p (arg00, arg10, 0))
same = arg00, alt0 = arg01, alt1 = arg11;
+ else if (operand_equal_p (arg01, arg11, 0))
+ same = arg01, alt0 = arg00, alt1 = arg10;
else if (operand_equal_p (arg00, arg11, 0))
same = arg00, alt0 = arg01, alt1 = arg10;
else if (operand_equal_p (arg01, arg10, 0))
}
}
- if (same)
+ if (!same)
+ return NULL_TREE;
+
+ if (! INTEGRAL_TYPE_P (type)
+ || TYPE_OVERFLOW_WRAPS (type)
+ /* We are neither factoring zero nor minus one. */
+ || TREE_CODE (same) == INTEGER_CST)
return fold_build2_loc (loc, MULT_EXPR, type,
fold_build2_loc (loc, code, type,
fold_convert_loc (loc, type, alt0),
fold_convert_loc (loc, type, alt1)),
fold_convert_loc (loc, type, same));
- return NULL_TREE;
+ /* Same may be zero and thus the operation 'code' may overflow. Likewise
+ same may be minus one and thus the multiplication may overflow. Perform
+ the operations in an unsigned type. */
+ tree utype = unsigned_type_for (type);
+ tree tem = fold_build2_loc (loc, code, utype,
+ fold_convert_loc (loc, utype, alt0),
+ fold_convert_loc (loc, utype, alt1));
+ /* If the sum evaluated to a constant that is not -INF the multiplication
+ cannot overflow. */
+ if (TREE_CODE (tem) == INTEGER_CST
+ && ! wi::eq_p (tem, wi::min_value (TYPE_PRECISION (utype), SIGNED)))
+ return fold_build2_loc (loc, MULT_EXPR, type,
+ fold_convert (type, tem), same);
+
+ return fold_convert_loc (loc, type,
+ fold_build2_loc (loc, MULT_EXPR, utype, tem,
+ fold_convert_loc (loc, utype, same)));
}
/* Subroutine of native_encode_expr. Encode the INTEGER_CST
native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
{
tree type = TREE_TYPE (expr);
- int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
+ int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
int byte, offset, word, words;
unsigned char value;
native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
{
tree type = TREE_TYPE (expr);
- machine_mode mode = TYPE_MODE (type);
+ scalar_mode mode = SCALAR_TYPE_MODE (type);
int total_bytes = GET_MODE_SIZE (mode);
FIXED_VALUE_TYPE value;
tree i_value, i_type;
native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
{
tree type = TREE_TYPE (expr);
- int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
+ int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
int byte, offset, word, words, bitpos;
unsigned char value;
offset += byte % UNITS_PER_WORD;
}
else
- offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
+ {
+ offset = byte;
+ if (BYTES_BIG_ENDIAN)
+ {
+ /* Reverse bytes within each long, or within the entire float
+ if it's smaller than a long (for HFmode). */
+ offset = MIN (3, total_bytes - 1) - offset;
+ gcc_assert (offset >= 0);
+ }
+ }
offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
if (offset >= off
&& offset - off < len)
return 0;
part = TREE_IMAGPART (expr);
if (off != -1)
- off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
+ off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
if (off == -1
&& isize != rsize)
offset = 0;
count = VECTOR_CST_NELTS (expr);
itype = TREE_TYPE (TREE_TYPE (expr));
- size = GET_MODE_SIZE (TYPE_MODE (itype));
+ size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
for (i = 0; i < count; i++)
{
if (off >= size)
static int
native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
{
- tree type = TREE_TYPE (expr);
- HOST_WIDE_INT total_bytes;
-
- if (TREE_CODE (type) != ARRAY_TYPE
- || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
- || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
- || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
+ if (! can_native_encode_string_p (expr))
return 0;
- total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
+
+ HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
if ((off == -1 && total_bytes > len)
|| off >= total_bytes)
return 0;
static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
- int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
+ int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
if (total_bytes > len
|| total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
- int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
+ scalar_mode mode = SCALAR_TYPE_MODE (type);
+ int total_bytes = GET_MODE_SIZE (mode);
double_int result;
FIXED_VALUE_TYPE fixed_value;
return NULL_TREE;
result = double_int::from_buffer (ptr, total_bytes);
- fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
+ fixed_value = fixed_from_double_int (result, mode);
return build_fixed (type, fixed_value);
}
static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
- machine_mode mode = TYPE_MODE (type);
+ scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
int total_bytes = GET_MODE_SIZE (mode);
unsigned char value;
/* There are always 32 bits in each long, no matter the size of
REAL_VALUE_TYPE r;
long tmp[6];
- total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
if (total_bytes > len || total_bytes > 24)
return NULL_TREE;
int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
int size;
etype = TREE_TYPE (type);
- size = GET_MODE_SIZE (TYPE_MODE (etype));
+ size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
if (size * 2 > len)
return NULL_TREE;
rpart = native_interpret_expr (etype, ptr, size);
{
tree etype, elem;
int i, size, count;
- tree *elements;
etype = TREE_TYPE (type);
- size = GET_MODE_SIZE (TYPE_MODE (etype));
+ size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
count = TYPE_VECTOR_SUBPARTS (type);
if (size * count > len)
return NULL_TREE;
- elements = XALLOCAVEC (tree, count);
- for (i = count - 1; i >= 0; i--)
+ auto_vec<tree, 32> elements (count);
+ for (i = 0; i < count; ++i)
{
elem = native_interpret_expr (etype, ptr+(i*size), size);
if (!elem)
return NULL_TREE;
- elements[i] = elem;
+ elements.quick_push (elem);
}
return build_vector (type, elements);
}
}
}
+/* Return true iff a constant of type TYPE is accepted by
+ native_encode_expr. */
+
+bool
+can_native_encode_type_p (tree type)
+{
+ switch (TREE_CODE (type))
+ {
+ case INTEGER_TYPE:
+ case REAL_TYPE:
+ case FIXED_POINT_TYPE:
+ case COMPLEX_TYPE:
+ case VECTOR_TYPE:
+ case POINTER_TYPE:
+ return true;
+ default:
+ return false;
+ }
+}
+
+/* Return true iff a STRING_CST EXPR is accepted by
+ native_encode_expr. */
+
+bool
+can_native_encode_string_p (const_tree expr)
+{
+ tree type = TREE_TYPE (expr);
+
+ /* Wide-char strings are encoded in target byte-order so native
+ encoding them is trivial. */
+ if (BITS_PER_UNIT != CHAR_BIT
+ || TREE_CODE (type) != ARRAY_TYPE
+ || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
+ || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
+ return false;
+ return true;
+}
+
/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
TYPE at compile-time. If we're unable to perform the conversion
return NULL_TREE. */
change = (cst == 0);
if (change
&& !flag_syntax_only
- && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
+ && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
== ZERO_EXTEND))
{
tree uns = unsigned_type_for (TREE_TYPE (and0));
if (TREE_CODE (op0) == ADDR_EXPR)
{
tree op00 = TREE_OPERAND (op0, 0);
- if ((TREE_CODE (op00) == VAR_DECL
+ if ((VAR_P (op00)
|| TREE_CODE (op00) == PARM_DECL
|| TREE_CODE (op00) == RESULT_DECL)
&& !TREE_READONLY (op00))
return tem;
if (LOGICAL_OP_NON_SHORT_CIRCUIT
+ && !flag_sanitize_coverage
&& (code == TRUTH_AND_EXPR
|| code == TRUTH_ANDIF_EXPR
|| code == TRUTH_OR_EXPR
return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
tem);
}
- /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
+ /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
else if (TREE_CODE (arg1) == icode
&& simple_operand_p_2 (arg0)
/* Return a positive integer when the symbol DECL is known to have
 a nonzero address, zero when it's known not to (e.g., it's a weak
 symbol), and a negative integer when the symbol is not yet in the
- symbol table and so whether or not its address is zero is unknown. */
+ symbol table and so whether or not its address is zero is unknown.
+ For function local objects always return positive integer. */
static int
maybe_nonzero_address (tree decl)
{
if (struct symtab_node *symbol = symtab_node::get_create (decl))
return symbol->nonzero_address ();
+ /* Function local objects are never NULL: the address of an
+ automatic variable within its own function is always nonzero.
+ NOTE(review): this check is only reached when no symtab node
+ was obtained above -- confirm the (unshown) context restricts
+ the symtab query to decls actually in the symbol table, so
+ this added branch is not dead code. */
+ if (DECL_P (decl)
+ && (DECL_CONTEXT (decl)
+ && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
+ && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
+ return 1;
+
return -1;
}
{
/* We can fold this expression to a constant if the non-constant
offset parts are equal. */
- if ((offset0 == offset1
- || (offset0 && offset1
- && operand_equal_p (offset0, offset1, 0)))
- && (equality_code
- || (indirect_base0
- && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
- || POINTER_TYPE_OVERFLOW_UNDEFINED))
-
+ if (offset0 == offset1
+ || (offset0 && offset1
+ && operand_equal_p (offset0, offset1, 0)))
{
if (!equality_code
&& bitpos0 != bitpos1
because pointer arithmetic is restricted to retain within an
object and overflow on pointer differences is undefined as of
6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
- else if (bitpos0 == bitpos1
- && (equality_code
- || (indirect_base0
- && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
- || POINTER_TYPE_OVERFLOW_UNDEFINED))
+ else if (bitpos0 == bitpos1)
{
/* By converting to signed sizetype we cover middle-end pointer
arithmetic which operates on unsigned pointer types of size
below follow the C++ rules with the additional property that
every object pointer compares greater than a null pointer.
*/
- else if (DECL_P (base0)
- && maybe_nonzero_address (base0) > 0
- /* Avoid folding references to struct members at offset 0 to
- prevent tests like '&ptr->firstmember == 0' from getting
- eliminated. When ptr is null, although the -> expression
- is strictly speaking invalid, GCC retains it as a matter
- of QoI. See PR c/44555. */
- && (offset0 == NULL_TREE && bitpos0 != 0)
+ else if (((DECL_P (base0)
+ && maybe_nonzero_address (base0) > 0
+ /* Avoid folding references to struct members at offset 0 to
+ prevent tests like '&ptr->firstmember == 0' from getting
+ eliminated. When ptr is null, although the -> expression
+ is strictly speaking invalid, GCC retains it as a matter
+ of QoI. See PR c/44555. */
+ && (offset0 == NULL_TREE && bitpos0 != 0))
+ || CONSTANT_CLASS_P (base0))
+ && indirect_base0
/* The caller guarantees that when one of the arguments is
constant (i.e., null in this case) it is second. */
&& integer_zerop (arg1))
if (save_p)
{
tem = save_expr (build2 (code, type, cval1, cval2));
- SET_EXPR_LOCATION (tem, loc);
+ protected_set_expr_location (tem, loc);
return tem;
}
return fold_build2_loc (loc, code, type, cval1, cval2);
}
}
- /* We can fold X/C1 op C2 where C1 and C2 are integer constants
- into a single range test. */
- if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
- || TREE_CODE (arg0) == EXACT_DIV_EXPR)
- && TREE_CODE (arg1) == INTEGER_CST
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
- && !integer_zerop (TREE_OPERAND (arg0, 1))
- && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
- && !TREE_OVERFLOW (arg1))
- {
- tem = fold_div_compare (loc, code, type, arg0, arg1);
- if (tem != NULL_TREE)
- return tem;
- }
-
return NULL_TREE;
}
/* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
- CONSTRUCTOR ARG into array ELTS and return true if successful. */
+ CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
+ true if successful. */
static bool
-vec_cst_ctor_to_array (tree arg, tree *elts)
+vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
{
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
+ unsigned int i;
if (TREE_CODE (arg) == VECTOR_CST)
{
fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
{
unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
- tree *elts;
bool need_ctor = false;
gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
|| TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
return NULL_TREE;
- elts = XALLOCAVEC (tree, nelts * 3);
- if (!vec_cst_ctor_to_array (arg0, elts)
- || !vec_cst_ctor_to_array (arg1, elts + nelts))
+ tree *in_elts = XALLOCAVEC (tree, nelts * 2);
+ if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
+ || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
return NULL_TREE;
+ auto_vec<tree, 32> out_elts (nelts);
for (i = 0; i < nelts; i++)
{
- if (!CONSTANT_CLASS_P (elts[sel[i]]))
+ if (!CONSTANT_CLASS_P (in_elts[sel[i]]))
need_ctor = true;
- elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
+ out_elts.quick_push (unshare_expr (in_elts[sel[i]]));
}
if (need_ctor)
vec<constructor_elt, va_gc> *v;
vec_alloc (v, nelts);
for (i = 0; i < nelts; i++)
- CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
+ CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
return build_constructor (type, v);
}
else
- return build_vector (type, &elts[2 * nelts]);
+ return build_vector (type, out_elts);
}
/* Try to fold a pointer difference of type TYPE two address expressions of
tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
- tree diff = build2 (MINUS_EXPR, type, op0, op1);
+ tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
return fold_build2_loc (loc, PLUS_EXPR, type,
base_offset,
fold_build2_loc (loc, MULT_EXPR, type,
exact_inverse (tree type, tree cst)
{
REAL_VALUE_TYPE r;
- tree unit_type, *elts;
+ tree unit_type;
machine_mode mode;
unsigned vec_nelts, i;
return NULL_TREE;
case VECTOR_CST:
- vec_nelts = VECTOR_CST_NELTS (cst);
- elts = XALLOCAVEC (tree, vec_nelts);
- unit_type = TREE_TYPE (type);
- mode = TYPE_MODE (unit_type);
+ {
+ vec_nelts = VECTOR_CST_NELTS (cst);
+ unit_type = TREE_TYPE (type);
+ mode = TYPE_MODE (unit_type);
- for (i = 0; i < vec_nelts; i++)
- {
- r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
- if (!exact_real_inverse (mode, &r))
- return NULL_TREE;
- elts[i] = build_real (unit_type, r);
- }
+ auto_vec<tree, 32> elts (vec_nelts);
+ for (i = 0; i < vec_nelts; i++)
+ {
+ r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
+ if (!exact_real_inverse (mode, &r))
+ return NULL_TREE;
+ elts.quick_push (build_real (unit_type, r));
+ }
- return build_vector (type, elts);
+ return build_vector (type, elts);
+ }
default:
return NULL_TREE;
/* Return true when T is an address and is known to be nonzero.
Handle warnings about undefined signed overflow. */
-static bool
+bool
tree_expr_nonzero_p (tree t)
{
bool ret, strict_overflow_p;
/* If this is a commutative operation, and ARG0 is a constant, move it
to ARG1 to reduce the number of tests below. */
if (commutative_tree_code (code)
- && tree_swap_operands_p (arg0, arg1, true))
+ && tree_swap_operands_p (arg0, arg1))
return fold_build2_loc (loc, code, type, op1, op0);
/* Likewise if this is a comparison, and ARG0 is a constant, move it
to ARG1 to reduce the number of tests below. */
if (kind == tcc_comparison
- && tree_swap_operands_p (arg0, arg1, true))
+ && tree_swap_operands_p (arg0, arg1))
return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
tem = generic_simplify (loc, code, type, op0, op1);
return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
tem);
}
- if (TREE_CODE (arg1) == COMPOUND_EXPR
- && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
+ if (TREE_CODE (arg1) == COMPOUND_EXPR)
{
tem = fold_build2_loc (loc, code, type, op0,
fold_convert_loc (loc, TREE_TYPE (op1),
if ((! FLOAT_TYPE_P (type) || flag_associative_math)
&& !TYPE_SATURATING (type))
{
- tree var0, con0, lit0, minus_lit0;
- tree var1, con1, lit1, minus_lit1;
+ tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
+ tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
tree atype = type;
bool ok = true;
then the result with variables. This increases the chances of
literals being recombined later and of generating relocatable
expressions for the sum of a constant and literal. */
- var0 = split_tree (loc, arg0, type, code,
- &con0, &lit0, &minus_lit0, 0);
- var1 = split_tree (loc, arg1, type, code,
- &con1, &lit1, &minus_lit1, code == MINUS_EXPR);
+ var0 = split_tree (arg0, type, code,
+ &minus_var0, &con0, &minus_con0,
+ &lit0, &minus_lit0, 0);
+ var1 = split_tree (arg1, type, code,
+ &minus_var1, &con1, &minus_con1,
+ &lit1, &minus_lit1, code == MINUS_EXPR);
/* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
if (code == MINUS_EXPR)
/* With undefined overflow prefer doing association in a type
which wraps on overflow, if that is one of the operand types. */
- if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
+ if (POINTER_TYPE_P (type)
|| (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
{
if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
/* With undefined overflow we can only associate constants with one
variable, and constants whose association doesn't overflow. */
- if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
+ if (POINTER_TYPE_P (atype)
|| (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
{
- if (var0 && var1)
+ if ((var0 && var1) || (minus_var0 && minus_var1))
{
- tree tmp0 = var0;
- tree tmp1 = var1;
+ /* ??? If split_tree would handle NEGATE_EXPR we could
+ simply reject these cases and the allowed cases would
+ be the var0/minus_var1 ones. */
+ tree tmp0 = var0 ? var0 : minus_var0;
+ tree tmp1 = var1 ? var1 : minus_var1;
bool one_neg = false;
if (TREE_CODE (tmp0) == NEGATE_EXPR)
|| !operand_equal_p (tmp0, tmp1, 0))
ok = false;
}
+ else if ((var0 && minus_var1
+ && ! operand_equal_p (var0, minus_var1, 0))
+ || (minus_var0 && var1
+ && ! operand_equal_p (minus_var0, var1, 0)))
+ ok = false;
}
/* Only do something if we found more than two objects. Otherwise,
nothing has changed and we risk infinite recursion. */
if (ok
&& (2 < ((var0 != 0) + (var1 != 0)
+ + (minus_var0 != 0) + (minus_var1 != 0)
+ (con0 != 0) + (con1 != 0)
+ + (minus_con0 != 0) + (minus_con1 != 0)
+ (lit0 != 0) + (lit1 != 0)
+ (minus_lit0 != 0) + (minus_lit1 != 0))))
{
- bool any_overflows = false;
- if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
- if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
- if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
- if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
var0 = associate_trees (loc, var0, var1, code, atype);
+ minus_var0 = associate_trees (loc, minus_var0, minus_var1,
+ code, atype);
con0 = associate_trees (loc, con0, con1, code, atype);
+ minus_con0 = associate_trees (loc, minus_con0, minus_con1,
+ code, atype);
lit0 = associate_trees (loc, lit0, lit1, code, atype);
minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
code, atype);
+ if (minus_var0 && var0)
+ {
+ var0 = associate_trees (loc, var0, minus_var0,
+ MINUS_EXPR, atype);
+ minus_var0 = 0;
+ }
+ if (minus_con0 && con0)
+ {
+ con0 = associate_trees (loc, con0, minus_con0,
+ MINUS_EXPR, atype);
+ minus_con0 = 0;
+ }
+
/* Preserve the MINUS_EXPR if the negative part of the literal is
greater than the positive part. Otherwise, the multiplicative
folding code (i.e extract_muldiv) may be fooled in case
{
if (TREE_CODE (lit0) == INTEGER_CST
&& TREE_CODE (minus_lit0) == INTEGER_CST
- && tree_int_cst_lt (lit0, minus_lit0))
+ && tree_int_cst_lt (lit0, minus_lit0)
+ /* But avoid ending up with only negated parts. */
+ && (var0 || con0))
{
minus_lit0 = associate_trees (loc, minus_lit0, lit0,
MINUS_EXPR, atype);
}
/* Don't introduce overflows through reassociation. */
- if (!any_overflows
- && ((lit0 && TREE_OVERFLOW_P (lit0))
- || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
+ if ((lit0 && TREE_OVERFLOW_P (lit0))
+ || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
return NULL_TREE;
- if (minus_lit0)
+ /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
+ con0 = associate_trees (loc, con0, lit0, code, atype);
+ lit0 = 0;
+ minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
+ code, atype);
+ minus_lit0 = 0;
+
+ /* Eliminate minus_con0. */
+ if (minus_con0)
{
- if (con0 == 0)
- return
- fold_convert_loc (loc, type,
- associate_trees (loc, var0, minus_lit0,
- MINUS_EXPR, atype));
+ if (con0)
+ con0 = associate_trees (loc, con0, minus_con0,
+ MINUS_EXPR, atype);
+ else if (var0)
+ var0 = associate_trees (loc, var0, minus_con0,
+ MINUS_EXPR, atype);
else
- {
- con0 = associate_trees (loc, con0, minus_lit0,
- MINUS_EXPR, atype);
- return
- fold_convert_loc (loc, type,
- associate_trees (loc, var0, con0,
- PLUS_EXPR, atype));
- }
+ gcc_unreachable ();
+ minus_con0 = 0;
+ }
+
+ /* Eliminate minus_var0. */
+ if (minus_var0)
+ {
+ if (con0)
+ con0 = associate_trees (loc, con0, minus_var0,
+ MINUS_EXPR, atype);
+ else
+ gcc_unreachable ();
+ minus_var0 = 0;
}
- con0 = associate_trees (loc, con0, lit0, code, atype);
return
fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
code, atype));
case MINUS_EXPR:
/* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
if (TREE_CODE (arg0) == NEGATE_EXPR
- && negate_expr_p (op1)
- && reorder_operands_p (arg0, arg1))
+ && negate_expr_p (op1))
return fold_build2_loc (loc, MINUS_EXPR, type,
negate_expr (op1),
fold_convert_loc (loc, type,
if (TREE_CODE (op1) == INTEGER_CST
&& tree_int_cst_sgn (op1) == -1
&& negate_expr_p (op0)
+ && negate_expr_p (op1)
&& (tem = negate_expr (op1)) != op1
&& ! TREE_OVERFLOW (tem))
return fold_build2_loc (loc, MULT_EXPR, type,
/* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
if (msk.and_not (c1 | c2) == 0)
- return fold_build2_loc (loc, BIT_IOR_EXPR, type,
- TREE_OPERAND (arg0, 0), arg1);
+ {
+ tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
+ return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
+ }
/* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
}
if (c3 != c1)
- return fold_build2_loc (loc, BIT_IOR_EXPR, type,
- fold_build2_loc (loc, BIT_AND_EXPR, type,
- TREE_OPERAND (arg0, 0),
- wide_int_to_tree (type,
- c3)),
- arg1);
+ {
+ tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
+ tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
+ wide_int_to_tree (type, c3));
+ return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
+ }
}
/* See if this can be simplified into a rotate first. If that
mode which allows further optimizations. */
int pop = wi::popcount (warg1);
if (!(pop >= BITS_PER_UNIT
- && exact_log2 (pop) != -1
+ && pow2p_hwi (pop)
&& wi::mask (pop, false, warg1.get_precision ()) == warg1))
return fold_build2_loc (loc, code, type, op0,
wide_int_to_tree (type, masked));
/* Convert -A / -B to A / B when the type is signed and overflow is
undefined. */
if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
- && TREE_CODE (arg0) == NEGATE_EXPR
+ && TREE_CODE (op0) == NEGATE_EXPR
&& negate_expr_p (op1))
{
if (INTEGRAL_TYPE_P (type))
/* If first arg is constant zero, return it. */
if (integer_zerop (arg0))
return fold_convert_loc (loc, type, arg0);
+ /* FALLTHRU */
case TRUTH_AND_EXPR:
/* If either arg is constant true, drop it. */
if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
/* If first arg is constant true, return it. */
if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
return fold_convert_loc (loc, type, arg0);
+ /* FALLTHRU */
case TRUTH_OR_EXPR:
/* If either arg is constant zero, drop it. */
if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
TREE_OPERAND (arg1, 0), arg0);
}
- /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
- if (TREE_CODE (arg0) == MINUS_EXPR
- && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
- && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
- 1)),
- arg1, 0)
- && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
- return omit_two_operands_loc (loc, type,
- code == NE_EXPR
- ? boolean_true_node : boolean_false_node,
- TREE_OPERAND (arg0, 1), arg1);
-
- /* Transform comparisons of the form X CMP C - X if C % 2 == 1. */
- if (TREE_CODE (arg1) == MINUS_EXPR
- && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
- && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
- 1)),
- arg0, 0)
- && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
- return omit_two_operands_loc (loc, type,
- code == NE_EXPR
- ? boolean_true_node : boolean_false_node,
- TREE_OPERAND (arg1, 1), arg0);
-
/* If this is an EQ or NE comparison with zero and ARG0 is
(1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
two operations, but the latter can be done in one less insn
}
}
- /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
- Similarly for NE_EXPR. */
- if (TREE_CODE (arg0) == BIT_AND_EXPR
- && TREE_CODE (arg1) == INTEGER_CST
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
- {
- tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
- TREE_TYPE (TREE_OPERAND (arg0, 1)),
- TREE_OPERAND (arg0, 1));
- tree dandnotc
- = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
- fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
- notc);
- tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
- if (integer_nonzerop (dandnotc))
- return omit_one_operand_loc (loc, type, rslt, arg0);
- }
-
/* If this is a comparison of a field, we may be able to simplify it. */
if ((TREE_CODE (arg0) == COMPONENT_REF
|| TREE_CODE (arg0) == BIT_FIELD_REF)
tree itype = TREE_TYPE (arg0);
if (operand_equal_p (arg01, arg11, 0))
- return fold_build2_loc (loc, code, type,
- fold_build2_loc (loc, BIT_AND_EXPR, itype,
- fold_build2_loc (loc,
- BIT_XOR_EXPR, itype,
- arg00, arg10),
- arg01),
- build_zero_cst (itype));
-
+ {
+ tem = fold_convert_loc (loc, itype, arg10);
+ tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
+ tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
+ return fold_build2_loc (loc, code, type, tem,
+ build_zero_cst (itype));
+ }
if (operand_equal_p (arg01, arg10, 0))
- return fold_build2_loc (loc, code, type,
- fold_build2_loc (loc, BIT_AND_EXPR, itype,
- fold_build2_loc (loc,
- BIT_XOR_EXPR, itype,
- arg00, arg11),
- arg01),
- build_zero_cst (itype));
-
+ {
+ tem = fold_convert_loc (loc, itype, arg11);
+ tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
+ tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
+ return fold_build2_loc (loc, code, type, tem,
+ build_zero_cst (itype));
+ }
if (operand_equal_p (arg00, arg11, 0))
- return fold_build2_loc (loc, code, type,
- fold_build2_loc (loc, BIT_AND_EXPR, itype,
- fold_build2_loc (loc,
- BIT_XOR_EXPR, itype,
- arg01, arg10),
- arg00),
- build_zero_cst (itype));
-
+ {
+ tem = fold_convert_loc (loc, itype, arg10);
+ tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
+ tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
+ return fold_build2_loc (loc, code, type, tem,
+ build_zero_cst (itype));
+ }
if (operand_equal_p (arg00, arg10, 0))
- return fold_build2_loc (loc, code, type,
- fold_build2_loc (loc, BIT_AND_EXPR, itype,
- fold_build2_loc (loc,
- BIT_XOR_EXPR, itype,
- arg01, arg11),
- arg00),
- build_zero_cst (itype));
+ {
+ tem = fold_convert_loc (loc, itype, arg11);
+ tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
+ tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
+ return fold_build2_loc (loc, code, type, tem,
+ build_zero_cst (itype));
+ }
}
if (TREE_CODE (arg0) == BIT_XOR_EXPR
case GOTO_EXPR:
*walk_subtrees = 0;
- /* ... fall through ... */
+ /* fall through */
default:
return NULL_TREE;
/* If this is a commutative operation, and OP0 is a constant, move it
to OP1 to reduce the number of tests below. */
if (commutative_ternary_tree_code (code)
- && tree_swap_operands_p (op0, op1, true))
+ && tree_swap_operands_p (op0, op1))
return fold_build3_loc (loc, code, type, op1, op0, op2);
tem = generic_simplify (loc, code, type, op0, op1, op2);
&& (TREE_CODE (arg2) == VECTOR_CST
|| TREE_CODE (arg2) == CONSTRUCTOR))
{
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
+ unsigned int nelts = VECTOR_CST_NELTS (arg0), i;
unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
- gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
+ gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
for (i = 0; i < nelts; i++)
{
tree val = VECTOR_CST_ELT (arg0, i);
Also try swapping the arguments and inverting the conditional. */
if (COMPARISON_CLASS_P (arg0)
- && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
- arg1, TREE_OPERAND (arg0, 1))
+ && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), arg1)
&& !HONOR_SIGNED_ZEROS (element_mode (arg1)))
{
tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
}
if (COMPARISON_CLASS_P (arg0)
- && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
- op2,
- TREE_OPERAND (arg0, 1))
+ && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
&& !HONOR_SIGNED_ZEROS (element_mode (op2)))
{
location_t loc0 = expr_location_or (arg0, loc);
/* If the second operand is simpler than the third, swap them
since that produces better jump optimization results. */
if (truth_value_p (TREE_CODE (arg0))
- && tree_swap_operands_p (op1, op2, false))
+ && tree_swap_operands_p (op1, op2))
{
location_t loc0 = expr_location_or (arg0, loc);
/* See if this can be inverted. If it can't, possibly because
STRIP_NOPS (tem);
if (TREE_CODE (tem) == RSHIFT_EXPR
&& tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
- && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
- tree_to_uhwi (TREE_OPERAND (tem, 1)))
+ && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
+ == tree_to_uhwi (TREE_OPERAND (tem, 1)))
return fold_build2_loc (loc, BIT_AND_EXPR, type,
- TREE_OPERAND (tem, 0), arg1);
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (tem, 0)),
+ op1);
}
/* A & N ? N : 0 is simply A & N if N is a power of two. This
&& (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
: TRUTH_ANDIF_EXPR,
- type, fold_convert_loc (loc, type, arg0), arg1);
+ type, fold_convert_loc (loc, type, arg0), op1);
/* Convert A ? B : 1 into !A || B if A and B are truth values. */
if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
? BIT_IOR_EXPR
: TRUTH_ORIF_EXPR,
type, fold_convert_loc (loc, type, tem),
- arg1);
+ op1);
}
/* Convert A ? 0 : B into !A && B if A and B are truth values. */
if (n == 1)
return VECTOR_CST_ELT (arg0, idx);
- tree *vals = XALLOCAVEC (tree, n);
+ auto_vec<tree, 32> vals (n);
for (unsigned i = 0; i < n; ++i)
- vals[i] = VECTOR_CST_ELT (arg0, idx + i);
+ vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
return build_vector (type, vals);
}
}
case VEC_PERM_EXPR:
if (TREE_CODE (arg2) == VECTOR_CST)
{
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
+ unsigned int nelts = VECTOR_CST_NELTS (arg2), i, mask, mask2;
unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
unsigned char *sel2 = sel + nelts;
bool need_mask_canon = false;
mask2 = 2 * nelts - 1;
mask = single_arg ? (nelts - 1) : mask2;
- gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
+ gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
for (i = 0; i < nelts; i++)
{
tree val = VECTOR_CST_ELT (arg2, i);
if (need_mask_canon && arg2 == op2)
{
- tree *tsel = XALLOCAVEC (tree, nelts);
tree eltype = TREE_TYPE (TREE_TYPE (arg2));
+ auto_vec<tree, 32> tsel (nelts);
for (i = 0; i < nelts; i++)
- tsel[i] = build_int_cst (eltype, sel[i]);
+ tsel.quick_push (build_int_cst (eltype, sel[i]));
op2 = build_vector (TREE_TYPE (arg2), tsel);
changed = true;
}
return arg0;
else
{
- tree *elts = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
- memcpy (elts, VECTOR_CST_ELTS (arg0),
- sizeof (tree) * TYPE_VECTOR_SUBPARTS (type));
+ unsigned int nelts = VECTOR_CST_NELTS (arg0);
+ auto_vec<tree, 32> elts (nelts);
+ elts.quick_grow (nelts);
+ memcpy (&elts[0], VECTOR_CST_ELTS (arg0),
+ sizeof (tree) * nelts);
elts[k] = arg1;
return build_vector (type, elts);
}
expression with code CODE of type TYPE with an operand OP0. */
tree
-fold_build1_stat_loc (location_t loc,
- enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
+fold_build1_loc (location_t loc,
+ enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
tree tem;
#ifdef ENABLE_FOLD_CHECKING
tem = fold_unary_loc (loc, code, type, op0);
if (!tem)
- tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
+ tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
#ifdef ENABLE_FOLD_CHECKING
md5_init_ctx (&ctx);
OP0 and OP1. */
tree
-fold_build2_stat_loc (location_t loc,
+fold_build2_loc (location_t loc,
enum tree_code code, tree type, tree op0, tree op1
MEM_STAT_DECL)
{
tem = fold_binary_loc (loc, code, type, op0, op1);
if (!tem)
- tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
+ tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
#ifdef ENABLE_FOLD_CHECKING
md5_init_ctx (&ctx);
type TYPE with operands OP0, OP1, and OP2. */
tree
-fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
+fold_build3_loc (location_t loc, enum tree_code code, tree type,
tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
tree tem;
gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
if (!tem)
- tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
+ tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
#ifdef ENABLE_FOLD_CHECKING
md5_init_ctx (&ctx);
< TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
return 0;
- /* .. fall through ... */
+ /* fall through */
case SAVE_EXPR:
return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
return 1;
}
- /* .. fall through ... */
+ /* fall through */
default:
return 0;
if (nonzero_addr >= 0)
return nonzero_addr;
- /* Function local objects are never NULL. */
- if (DECL_P (base)
- && (DECL_CONTEXT (base)
- && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
- && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
- return true;
-
/* Constants are never weak. */
if (CONSTANT_CLASS_P (base))
return true;
}
break;
+ case SSA_NAME:
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
+ break;
+ return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
+
default:
break;
}
string = exp1;
}
+ scalar_int_mode char_mode;
if (string
&& TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
&& TREE_CODE (string) == STRING_CST
&& TREE_CODE (index) == INTEGER_CST
&& compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
- && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
- == MODE_INT)
- && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
+ && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
+ &char_mode)
+ && GET_MODE_SIZE (char_mode) == 1)
return build_int_cst_type (TREE_TYPE (exp),
(TREE_STRING_POINTER (string)
[TREE_INT_CST_LOW (index)]));
bool overflow;
wide_int val = wi::neg (arg0, &overflow);
t = force_fit_type (type, val, 1,
- (overflow | TREE_OVERFLOW (arg0))
- && !TYPE_UNSIGNED (type));
+ (overflow && ! TYPE_UNSIGNED (type))
+ || TREE_OVERFLOW (arg0));
break;
}
if (!VECTOR_TYPE_P (type))
{
/* Have vector comparison with scalar boolean result. */
- bool result = true;
gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
&& VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
tree elem0 = VECTOR_CST_ELT (op0, i);
tree elem1 = VECTOR_CST_ELT (op1, i);
tree tmp = fold_relational_const (code, type, elem0, elem1);
- result &= integer_onep (tmp);
+ if (tmp == NULL_TREE)
+ return NULL_TREE;
+ if (integer_zerop (tmp))
+ return constant_boolean_node (false, type);
}
- if (code == NE_EXPR)
- result = !result;
- return constant_boolean_node (result, type);
+ return constant_boolean_node (true, type);
}
unsigned count = VECTOR_CST_NELTS (op0);
- tree *elts = XALLOCAVEC (tree, count);
gcc_assert (VECTOR_CST_NELTS (op1) == count
&& TYPE_VECTOR_SUBPARTS (type) == count);
+ auto_vec<tree, 32> elts (count);
for (unsigned i = 0; i < count; i++)
{
tree elem_type = TREE_TYPE (type);
if (tem == NULL_TREE)
return NULL_TREE;
- elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
+ elts.quick_push (build_int_cst (elem_type,
+ integer_zerop (tem) ? 0 : -1));
}
return build_vector (type, elts);
return expr;
}
- return build1 (CLEANUP_POINT_EXPR, type, expr);
+ return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
STRIP_NOPS (sub);
subtype = TREE_TYPE (sub);
- if (!POINTER_TYPE_P (subtype))
+ if (!POINTER_TYPE_P (subtype)
+ || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
return NULL_TREE;
if (TREE_CODE (sub) == ADDR_EXPR)
&& type == TREE_TYPE (op00type))
{
tree type_domain = TYPE_DOMAIN (op00type);
- tree min_val = size_zero_node;
+ tree min = size_zero_node;
if (type_domain && TYPE_MIN_VALUE (type_domain))
- min_val = TYPE_MIN_VALUE (type_domain);
- op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
- TYPE_SIZE_UNIT (type));
- op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
- return build4_loc (loc, ARRAY_REF, type, op00, op01,
- NULL_TREE, NULL_TREE);
+ min = TYPE_MIN_VALUE (type_domain);
+ offset_int off = wi::to_offset (op01);
+ offset_int el_sz = wi::to_offset (TYPE_SIZE_UNIT (type));
+ offset_int remainder;
+ off = wi::divmod_trunc (off, el_sz, SIGNED, &remainder);
+ if (remainder == 0 && TREE_CODE (min) == INTEGER_CST)
+ {
+ off = off + wi::to_offset (min);
+ op01 = wide_int_to_tree (sizetype, off);
+ return build4_loc (loc, ARRAY_REF, type, op00, op01,
+ NULL_TREE, NULL_TREE);
+ }
}
}
}
}
/* If divisor is a power of two, simplify this to bit manipulation. */
- if (divisor == (divisor & -divisor))
+ if (pow2_or_zerop (divisor))
{
if (TREE_CODE (value) == INTEGER_CST)
{
overflow_p = TREE_OVERFLOW (value);
val += divisor - 1;
- val &= - (int) divisor;
+ val &= (int) -divisor;
if (val == 0)
overflow_p = true;
}
/* If divisor is a power of two, simplify this to bit manipulation. */
- if (divisor == (divisor & -divisor))
+ if (pow2_or_zerop (divisor))
{
tree t;
&volatilep);
core = build_fold_addr_expr_loc (loc, core);
}
+ else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
+ {
+ core = TREE_OPERAND (exp, 0);
+ STRIP_NOPS (core);
+ *pbitpos = 0;
+ *poffset = TREE_OPERAND (exp, 1);
+ if (TREE_CODE (*poffset) == INTEGER_CST)
+ {
+ offset_int tem = wi::sext (wi::to_offset (*poffset),
+ TYPE_PRECISION (TREE_TYPE (*poffset)));
+ tem <<= LOG2_BITS_PER_UNIT;
+ if (wi::fits_shwi_p (tem))
+ {
+ *pbitpos = tem.to_shwi ();
+ *poffset = NULL_TREE;
+ }
+ }
+ }
else
{
core = exp;
}
/* Return a char pointer for a C string if it is a string constant
- or sum of string constant and integer constant. */
+ or sum of string constant and integer constant. We only support
+ string constants properly terminated with '\0' character.
+ If STRLEN is a valid pointer, length (including terminating character)
+ of returned string is stored to the argument.  On failure NULL is
+ returned and *STRLEN (when supplied) is left as 0. */
const char *
-c_getstr (tree src)
+c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
{
tree offset_node;
+ /* Ensure *STRLEN is well-defined on every early-return path. */
+ if (strlen)
+ *strlen = 0;
+
src = string_constant (src, &offset_node);
if (src == 0)
- return 0;
+ return NULL;
- if (offset_node == 0)
- return TREE_STRING_POINTER (src);
- else if (!tree_fits_uhwi_p (offset_node)
- || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
- return 0;
+ /* Only a constant, unsigned-HWI-representable offset can be used
+ to index into the string's host representation. */
+ unsigned HOST_WIDE_INT offset = 0;
+ if (offset_node != NULL_TREE)
+ {
+ if (!tree_fits_uhwi_p (offset_node))
+ return NULL;
+ else
+ offset = tree_to_uhwi (offset_node);
+ }
+
+ unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
+ const char *string = TREE_STRING_POINTER (src);
+
+ /* Support only properly null-terminated strings. */
+ if (string_length == 0
+ || string[string_length - 1] != '\0'
+ || offset >= string_length)
+ return NULL;
- return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
+ if (strlen)
+ *strlen = string_length - offset;
+ return string + offset;
}
#if CHECKING_P
x);
}
+/* Verify that various binary operations on vectors are folded
+ correctly. */
+
+static void
+test_vector_folding ()
+{
+ /* Build two 4-element integer vector constants: all-zeros and
+ all-ones elements. */
+ tree inner_type = integer_type_node;
+ tree type = build_vector_type (inner_type, 4);
+ tree zero = build_zero_cst (type);
+ tree one = build_one_cst (type);
+
+ /* Verify equality tests that return a scalar boolean result. */
+ tree res_type = boolean_type_node;
+ ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
+ ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
+ ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
+ ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
+}
+
/* Run all of the selftests within this file. */
void
fold_const_c_tests ()
{
test_arithmetic_folding ();
+ test_vector_folding ();
}
} // namespace selftest