/* Fold a constant sub-tree into a single node for C-compiler
- Copyright (C) 1987-2014 Free Software Foundation, Inc.
+ Copyright (C) 1987-2015 Free Software Foundation, Inc.
This file is part of GCC.
#include "config.h"
#include "system.h"
#include "coretypes.h"
-#include "tm.h"
-#include "flags.h"
+#include "backend.h"
+#include "predict.h"
#include "tree.h"
+#include "gimple.h"
+#include "rtl.h"
+#include "flags.h"
+#include "alias.h"
+#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
-#include "rtl.h"
+#include "insn-config.h"
+#include "expmed.h"
+#include "dojump.h"
+#include "explow.h"
+#include "emit-rtl.h"
+#include "varasm.h"
+#include "stmt.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
-#include "basic-block.h"
-#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
-#include "gimple-expr.h"
-#include "is-a.h"
-#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
-#include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
#include "builtins.h"
#include "cgraph.h"
+#include "generic-match.h"
+#include "optabs.h"
+
+#ifndef LOAD_EXTEND_OP
+#define LOAD_EXTEND_OP(M) UNKNOWN
+#endif
/* Nonzero if we are folding constants inside an initializer; zero
otherwise. */
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
-static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
-static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
-static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
HOST_WIDE_INT *,
- enum machine_mode *, int *, int *,
+ machine_mode *, int *, int *,
tree *, tree *);
-static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
enum tree_code, tree,
tree, tree,
tree, tree, int);
-static tree fold_mathfn_compare (location_t,
- enum built_in_function, enum tree_code,
- tree, tree, tree);
-static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
+static tree fold_view_convert_expr (tree, tree);
+static bool vec_cst_ctor_to_array (tree, tree *);
+
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
Otherwise, return LOC. */
switch (TREE_CODE (t))
{
case INTEGER_CST:
- if (TYPE_OVERFLOW_WRAPS (type))
+ if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
return true;
/* Check that -CST will not overflow type. */
&& TYPE_OVERFLOW_WRAPS (type));
case FIXED_CST:
- case NEGATE_EXPR:
return true;
+ case NEGATE_EXPR:
+ return !TYPE_OVERFLOW_SANITIZED (type);
+
case REAL_CST:
/* We want to canonicalize to positive real constants. Pretend
that only negative ones can be easily negated. */
return negate_expr_p (TREE_OPERAND (t, 0));
case PLUS_EXPR:
- if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
- || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
+ if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
+ || HONOR_SIGNED_ZEROS (element_mode (type)))
return false;
/* -(A + B) -> (-B) - A. */
if (negate_expr_p (TREE_OPERAND (t, 1))
case MINUS_EXPR:
/* We can't turn -(A-B) into B-A when we honor signed zeros. */
- return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
- && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
+ return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
+ && !HONOR_SIGNED_ZEROS (element_mode (type))
&& reorder_operands_p (TREE_OPERAND (t, 0),
TREE_OPERAND (t, 1));
/* Fall through. */
case RDIV_EXPR:
- if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
+ if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
return negate_expr_p (TREE_OPERAND (t, 1))
|| negate_expr_p (TREE_OPERAND (t, 0));
break;
case INTEGER_CST:
tem = fold_negate_const (t, type);
if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
- || !TYPE_OVERFLOW_TRAPS (type))
+ || (ANY_INTEGRAL_TYPE_P (type)
+ && !TYPE_OVERFLOW_TRAPS (type)
+ && TYPE_OVERFLOW_WRAPS (type))
+ || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
return tem;
break;
case REAL_CST:
tem = fold_negate_const (t, type);
- /* Two's complement FP formats, such as c4x, may overflow. */
- if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
- return tem;
- break;
+ return tem;
case FIXED_CST:
tem = fold_negate_const (t, type);
case COMPLEX_CST:
{
- tree rpart = negate_expr (TREE_REALPART (t));
- tree ipart = negate_expr (TREE_IMAGPART (t));
-
- if ((TREE_CODE (rpart) == REAL_CST
- && TREE_CODE (ipart) == REAL_CST)
- || (TREE_CODE (rpart) == INTEGER_CST
- && TREE_CODE (ipart) == INTEGER_CST))
+ tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
+ tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
+ if (rpart && ipart)
return build_complex (type, rpart, ipart);
}
break;
break;
case NEGATE_EXPR:
- return TREE_OPERAND (t, 0);
+ if (!TYPE_OVERFLOW_SANITIZED (type))
+ return TREE_OPERAND (t, 0);
+ break;
case PLUS_EXPR:
- if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
- && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
+ if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
+ && !HONOR_SIGNED_ZEROS (element_mode (type)))
{
/* -(A + B) -> (-B) - A. */
if (negate_expr_p (TREE_OPERAND (t, 1))
case MINUS_EXPR:
/* - (A - B) -> B - A */
- if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
- && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
+ if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
+ && !HONOR_SIGNED_ZEROS (element_mode (type))
&& reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
return fold_build2_loc (loc, MINUS_EXPR, type,
TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
/* Fall through. */
case RDIV_EXPR:
- if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
+ if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
{
tem = TREE_OPERAND (t, 1);
if (negate_expr_p (tem))
STRIP_NOPS (arg1);
STRIP_NOPS (arg2);
- if (TREE_CODE (arg1) == INTEGER_CST)
- return int_const_binop (code, arg1, arg2);
+ if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
+ {
+ if (code == POINTER_PLUS_EXPR)
+ return int_const_binop (PLUS_EXPR,
+ arg1, fold_convert (TREE_TYPE (arg1), arg2));
+
+ return int_const_binop (code, arg1, arg2);
+ }
- if (TREE_CODE (arg1) == REAL_CST)
+ if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
{
- enum machine_mode mode;
+ machine_mode mode;
REAL_VALUE_TYPE d1;
REAL_VALUE_TYPE d2;
REAL_VALUE_TYPE value;
case MINUS_EXPR:
case MULT_EXPR:
case TRUNC_DIV_EXPR:
+ if (TREE_CODE (arg2) != FIXED_CST)
+ return NULL_TREE;
f2 = TREE_FIXED_CST (arg2);
break;
case LSHIFT_EXPR:
case RSHIFT_EXPR:
{
+ if (TREE_CODE (arg2) != INTEGER_CST)
+ return NULL_TREE;
wide_int w2 = arg2;
f2.data.high = w2.elt (1);
f2.data.low = w2.elt (0);
return t;
}
- if (TREE_CODE (arg1) == COMPLEX_CST)
+ if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
{
tree type = TREE_TYPE (arg1);
tree r1 = TREE_REALPART (arg1);
int count = TYPE_VECTOR_SUBPARTS (type), i;
tree *elts = XALLOCAVEC (tree, count);
- if (code == VEC_LSHIFT_EXPR
- || code == VEC_RSHIFT_EXPR)
+ for (i = 0; i < count; i++)
{
- if (!tree_fits_uhwi_p (arg2))
- return NULL_TREE;
+ tree elem1 = VECTOR_CST_ELT (arg1, i);
- unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
- unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
- unsigned HOST_WIDE_INT innerc
- = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
- if (shiftc >= outerc || (shiftc % innerc) != 0)
+ elts[i] = const_binop (code, elem1, arg2);
+
+	  /* It is possible that const_binop cannot handle the given
+	     code and so returns NULL_TREE.  */
+ if (elts[i] == NULL_TREE)
return NULL_TREE;
- int offset = shiftc / innerc;
- /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
- For reductions, compiler emits VEC_RSHIFT_EXPR always,
- for !BYTES_BIG_ENDIAN picks first vector element, but
- for BYTES_BIG_ENDIAN last element from the vector. */
- if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
- offset = -offset;
- tree zero = build_zero_cst (TREE_TYPE (type));
+ }
+
+ return build_vector (type, elts);
+ }
+ return NULL_TREE;
+}
+
+/* Overload that adds a TYPE parameter to be able to dispatch
+ to fold_relational_const. */
+
+tree
+const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
+{
+  /* Comparison codes need the result type, so dispatch them to the
+     relational constant folder.  */
+  if (TREE_CODE_CLASS (code) == tcc_comparison)
+    return fold_relational_const (code, type, arg1, arg2);
+
+  /* ??? Until we make the const_binop worker take the type of the
+     result as argument put those cases that need it here.  */
+  switch (code)
+    {
+    case COMPLEX_EXPR:
+      if ((TREE_CODE (arg1) == REAL_CST
+	   && TREE_CODE (arg2) == REAL_CST)
+	  || (TREE_CODE (arg1) == INTEGER_CST
+	      && TREE_CODE (arg2) == INTEGER_CST))
+	return build_complex (type, arg1, arg2);
+      return NULL_TREE;
+
+    case VEC_PACK_TRUNC_EXPR:
+    case VEC_PACK_FIX_TRUNC_EXPR:
+      {
+	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
+	tree *elts;
+
+	/* Each input vector supplies half of the result's elements.  */
+	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
+		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
+	if (TREE_CODE (arg1) != VECTOR_CST
+	    || TREE_CODE (arg2) != VECTOR_CST)
+	  return NULL_TREE;
+
+	elts = XALLOCAVEC (tree, nelts);
+	if (!vec_cst_ctor_to_array (arg1, elts)
+	    || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
+	  return NULL_TREE;
+
+	/* Narrow each element to the result's element type: integral
+	   truncation for VEC_PACK_TRUNC_EXPR, float-to-int truncation
+	   otherwise.  */
+	for (i = 0; i < nelts; i++)
+	  {
+	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
+					  ? NOP_EXPR : FIX_TRUNC_EXPR,
+					  TREE_TYPE (type), elts[i]);
+	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
+	      return NULL_TREE;
+	  }
+
+	return build_vector (type, elts);
+      }
+
+    case VEC_WIDEN_MULT_LO_EXPR:
+    case VEC_WIDEN_MULT_HI_EXPR:
+    case VEC_WIDEN_MULT_EVEN_EXPR:
+    case VEC_WIDEN_MULT_ODD_EXPR:
+      {
+	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
+	unsigned int out, ofs, scale;
+	tree *elts;
+
+	/* The inputs have twice as many (narrower) elements as the
+	   result.  */
+	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
+		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
+	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
+	  return NULL_TREE;
+
+	elts = XALLOCAVEC (tree, nelts * 4);
+	if (!vec_cst_ctor_to_array (arg1, elts)
+	    || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
+	  return NULL_TREE;
+
+	/* LO/HI select a contiguous half of each input (which half is
+	   endian-dependent); EVEN/ODD select every second element
+	   starting at index 0 resp. 1.  */
+	if (code == VEC_WIDEN_MULT_LO_EXPR)
+	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
+	else if (code == VEC_WIDEN_MULT_HI_EXPR)
+	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
+	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
+	  scale = 1, ofs = 0;
+	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
+	  scale = 1, ofs = 1;
+
+	for (out = 0; out < nelts; out++)
+	  {
+	    unsigned int in1 = (out << scale) + ofs;
+	    unsigned int in2 = in1 + nelts * 2;
+	    tree t1, t2;
+
+	    /* Widen both selected elements to the result's element
+	       type, then multiply there.  */
+	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
+	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
+
+	    if (t1 == NULL_TREE || t2 == NULL_TREE)
+	      return NULL_TREE;
+	    elts[out] = const_binop (MULT_EXPR, t1, t2);
+	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
+	      return NULL_TREE;
+	  }
+
+	return build_vector (type, elts);
+      }
+
+    default:;
+    }
+
+  if (TREE_CODE_CLASS (code) != tcc_binary)
+    return NULL_TREE;
+
+  /* Make sure type and arg0 have the same saturating flag.  */
+  gcc_checking_assert (TYPE_SATURATING (type)
+		       == TYPE_SATURATING (TREE_TYPE (arg1)));
+
+  /* All remaining binary codes go to the two-operand worker, which
+     derives the result type from the operands.  */
+  return const_binop (code, arg1, arg2);
+}
+
+/* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
+ Return zero if computing the constants is not possible. */
+
+tree
+const_unop (enum tree_code code, tree type, tree arg0)
+{
+ switch (code)
+ {
+ CASE_CONVERT:
+ case FLOAT_EXPR:
+ case FIX_TRUNC_EXPR:
+ case FIXED_CONVERT_EXPR:
+ return fold_convert_const (code, type, arg0);
+
+ case ADDR_SPACE_CONVERT_EXPR:
+ if (integer_zerop (arg0))
+ return fold_convert_const (code, type, arg0);
+ break;
+
+ case VIEW_CONVERT_EXPR:
+ return fold_view_convert_expr (type, arg0);
+
+ case NEGATE_EXPR:
+ {
+ /* Can't call fold_negate_const directly here as that doesn't
+ handle all cases and we might not be able to negate some
+ constants. */
+ tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
+ if (tem && CONSTANT_CLASS_P (tem))
+ return tem;
+ break;
+ }
+
+ case ABS_EXPR:
+ if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
+ return fold_abs_const (arg0, type);
+ break;
+
+ case CONJ_EXPR:
+ if (TREE_CODE (arg0) == COMPLEX_CST)
+ {
+ tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
+ TREE_TYPE (type));
+ return build_complex (type, TREE_REALPART (arg0), ipart);
+ }
+ break;
+
+ case BIT_NOT_EXPR:
+ if (TREE_CODE (arg0) == INTEGER_CST)
+ return fold_not_const (arg0, type);
+ /* Perform BIT_NOT_EXPR on each element individually. */
+ else if (TREE_CODE (arg0) == VECTOR_CST)
+ {
+ tree *elements;
+ tree elem;
+ unsigned count = VECTOR_CST_NELTS (arg0), i;
+
+ elements = XALLOCAVEC (tree, count);
for (i = 0; i < count; i++)
{
- if (i + offset < 0 || i + offset >= count)
- elts[i] = zero;
- else
- elts[i] = VECTOR_CST_ELT (arg1, i + offset);
+ elem = VECTOR_CST_ELT (arg0, i);
+ elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
+ if (elem == NULL_TREE)
+ break;
+ elements[i] = elem;
}
+ if (i == count)
+ return build_vector (type, elements);
}
- else
- for (i = 0; i < count; i++)
+ break;
+
+ case TRUTH_NOT_EXPR:
+ if (TREE_CODE (arg0) == INTEGER_CST)
+ return constant_boolean_node (integer_zerop (arg0), type);
+ break;
+
+ case REALPART_EXPR:
+ if (TREE_CODE (arg0) == COMPLEX_CST)
+ return fold_convert (type, TREE_REALPART (arg0));
+ break;
+
+ case IMAGPART_EXPR:
+ if (TREE_CODE (arg0) == COMPLEX_CST)
+ return fold_convert (type, TREE_IMAGPART (arg0));
+ break;
+
+ case VEC_UNPACK_LO_EXPR:
+ case VEC_UNPACK_HI_EXPR:
+ case VEC_UNPACK_FLOAT_LO_EXPR:
+ case VEC_UNPACK_FLOAT_HI_EXPR:
+ {
+ unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
+ tree *elts;
+ enum tree_code subcode;
+
+ gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
+ if (TREE_CODE (arg0) != VECTOR_CST)
+ return NULL_TREE;
+
+ elts = XALLOCAVEC (tree, nelts * 2);
+ if (!vec_cst_ctor_to_array (arg0, elts))
+ return NULL_TREE;
+
+ if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
+ || code == VEC_UNPACK_FLOAT_LO_EXPR))
+ elts += nelts;
+
+ if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
+ subcode = NOP_EXPR;
+ else
+ subcode = FLOAT_EXPR;
+
+ for (i = 0; i < nelts; i++)
{
- tree elem1 = VECTOR_CST_ELT (arg1, i);
+ elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
+ if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
+ return NULL_TREE;
+ }
- elts[i] = const_binop (code, elem1, arg2);
+ return build_vector (type, elts);
+ }
- /* It is possible that const_binop cannot handle the given
- code and return NULL_TREE */
- if (elts[i] == NULL_TREE)
+ case REDUC_MIN_EXPR:
+ case REDUC_MAX_EXPR:
+ case REDUC_PLUS_EXPR:
+ {
+ unsigned int nelts, i;
+ tree *elts;
+ enum tree_code subcode;
+
+ if (TREE_CODE (arg0) != VECTOR_CST)
+ return NULL_TREE;
+ nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
+
+ elts = XALLOCAVEC (tree, nelts);
+ if (!vec_cst_ctor_to_array (arg0, elts))
+ return NULL_TREE;
+
+ switch (code)
+ {
+ case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
+ case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
+ case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
+ default: gcc_unreachable ();
+ }
+
+ for (i = 1; i < nelts; i++)
+ {
+ elts[0] = const_binop (subcode, elts[0], elts[i]);
+ if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
return NULL_TREE;
}
- return build_vector (type, elts);
+ return elts[0];
+ }
+
+ default:
+ break;
}
+
return NULL_TREE;
}
return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
-/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
- Zero means allow extended lvalues. */
-
-int pedantic_lvalues;
-
/* When pedantic, return an expr equal to X but certainly not valid as a
pedantic lvalue. Otherwise, return X. */
static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
- if (pedantic_lvalues)
- return non_lvalue_loc (loc, x);
-
return protected_set_expr_location_unshare (x, loc);
}
\f
enum tree_code rcode, tree truth_type,
tree ll_arg, tree lr_arg)
{
- bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
+ bool honor_nans = HONOR_NANS (ll_arg);
enum comparison_code lcompcode = comparison_to_compcode (lcode);
enum comparison_code rcompcode = comparison_to_compcode (rcode);
int compcode;
return 1;
- if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
+ if (!HONOR_SIGNED_ZEROS (arg0))
{
/* If we do not distinguish between signed and unsigned zero,
consider them equal. */
case ADDR_EXPR:
return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
- ? OEP_CONSTANT_ADDRESS_OF : 0);
+ ? OEP_CONSTANT_ADDRESS_OF | OEP_ADDRESS_OF : 0);
default:
break;
}
switch (TREE_CODE (arg0))
{
case INDIRECT_REF:
- flags &= ~OEP_CONSTANT_ADDRESS_OF;
+ if (!(flags & OEP_ADDRESS_OF)
+ && (TYPE_ALIGN (TREE_TYPE (arg0))
+ != TYPE_ALIGN (TREE_TYPE (arg1))))
+ return 0;
+ flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
return OP_SAME (0);
case REALPART_EXPR:
return OP_SAME (0);
case TARGET_MEM_REF:
- flags &= ~OEP_CONSTANT_ADDRESS_OF;
- /* Require equal extra operands and then fall through to MEM_REF
- handling of the two common operands. */
- if (!OP_SAME_WITH_NULL (2)
- || !OP_SAME_WITH_NULL (3)
- || !OP_SAME_WITH_NULL (4))
- return 0;
- /* Fallthru. */
case MEM_REF:
- flags &= ~OEP_CONSTANT_ADDRESS_OF;
/* Require equal access sizes, and similar pointer types.
We can have incomplete types for array references of
variable-sized arrays from the Fortran frontend
though. Also verify the types are compatible. */
- return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
+ if (!((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
|| (TYPE_SIZE (TREE_TYPE (arg0))
&& TYPE_SIZE (TREE_TYPE (arg1))
&& operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
TYPE_SIZE (TREE_TYPE (arg1)), flags)))
&& types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
- && alias_ptr_types_compatible_p
- (TREE_TYPE (TREE_OPERAND (arg0, 1)),
- TREE_TYPE (TREE_OPERAND (arg1, 1)))
- && OP_SAME (0) && OP_SAME (1));
+ && ((flags & OEP_ADDRESS_OF)
+ || (alias_ptr_types_compatible_p
+ (TREE_TYPE (TREE_OPERAND (arg0, 1)),
+ TREE_TYPE (TREE_OPERAND (arg1, 1)))
+ && (MR_DEPENDENCE_CLIQUE (arg0)
+ == MR_DEPENDENCE_CLIQUE (arg1))
+ && (MR_DEPENDENCE_BASE (arg0)
+ == MR_DEPENDENCE_BASE (arg1))
+ && (TYPE_ALIGN (TREE_TYPE (arg0))
+ == TYPE_ALIGN (TREE_TYPE (arg1)))))))
+ return 0;
+ flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
+ return (OP_SAME (0) && OP_SAME (1)
+ /* TARGET_MEM_REF require equal extra operands. */
+ && (TREE_CODE (arg0) != TARGET_MEM_REF
+ || (OP_SAME_WITH_NULL (2)
+ && OP_SAME_WITH_NULL (3)
+ && OP_SAME_WITH_NULL (4))));
case ARRAY_REF:
case ARRAY_RANGE_REF:
may have different types but same value here. */
if (!OP_SAME (0))
return 0;
- flags &= ~OEP_CONSTANT_ADDRESS_OF;
+ flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
TREE_OPERAND (arg1, 1))
|| OP_SAME (1))
if (!OP_SAME_WITH_NULL (0)
|| !OP_SAME (1))
return 0;
- flags &= ~OEP_CONSTANT_ADDRESS_OF;
+ flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
return OP_SAME_WITH_NULL (2);
case BIT_FIELD_REF:
if (!OP_SAME (0))
return 0;
- flags &= ~OEP_CONSTANT_ADDRESS_OF;
+ flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
return OP_SAME (1) && OP_SAME (2);
default:
switch (TREE_CODE (arg0))
{
case ADDR_EXPR:
+ return operand_equal_p (TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg1, 0),
+ flags | OEP_ADDRESS_OF);
+
case TRUTH_NOT_EXPR:
return OP_SAME (0);
switch (TREE_CODE (arg0))
{
case CALL_EXPR:
- /* If the CALL_EXPRs call different functions, then they
- clearly can not be equal. */
- if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
- flags))
+ if ((CALL_EXPR_FN (arg0) == NULL_TREE)
+ != (CALL_EXPR_FN (arg1) == NULL_TREE))
+	/* If not both CALL_EXPRs are either internal or normal function
+	   calls, then they are not equal.  */
return 0;
+ else if (CALL_EXPR_FN (arg0) == NULL_TREE)
+ {
+ /* If the CALL_EXPRs call different internal functions, then they
+ are not equal. */
+ if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
+ return 0;
+ }
+ else
+ {
+ /* If the CALL_EXPRs call different functions, then they are not
+ equal. */
+ if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
+ flags))
+ return 0;
+ }
{
unsigned int cef = call_expr_flags (arg0);
return non_lvalue_loc (loc, t);
}
-/* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
-
-static tree
-pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
- tree omitted)
-{
- tree t = fold_convert_loc (loc, type, result);
-
- /* If the resulting operand is an empty statement, just return the omitted
- statement casted to void. */
- if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
- return build1_loc (loc, NOP_EXPR, void_type_node,
- fold_ignored_result (omitted));
-
- if (TREE_SIDE_EFFECTS (omitted))
- return build2_loc (loc, COMPOUND_EXPR, type,
- fold_ignored_result (omitted), t);
-
- return pedantic_non_lvalue_loc (loc, t);
-}
-
/* Return a tree for the case when the result of an expression is RESULT
converted to TYPE and OMITTED1 and OMITTED2 were previously operands
of the expression but are now not needed.
&& code != NE_EXPR && code != EQ_EXPR)
return NULL_TREE;
- code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
+ code = invert_tree_comparison (code, HONOR_NANS (op_type));
if (code == ERROR_MARK)
return NULL_TREE;
type, arg);
}
-/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
- operands are another bit-wise operation with a common input. If so,
- distribute the bit operations to save an operation and possibly two if
- constants are involved. For example, convert
- (A | B) & (A | C) into A | (B & C)
- Further simplification will occur if B and C are constants.
-
- If this optimization cannot be done, 0 will be returned. */
-
-static tree
-distribute_bit_expr (location_t loc, enum tree_code code, tree type,
- tree arg0, tree arg1)
-{
- tree common;
- tree left, right;
-
- if (TREE_CODE (arg0) != TREE_CODE (arg1)
- || TREE_CODE (arg0) == code
- || (TREE_CODE (arg0) != BIT_AND_EXPR
- && TREE_CODE (arg0) != BIT_IOR_EXPR))
- return 0;
-
- if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
- {
- common = TREE_OPERAND (arg0, 0);
- left = TREE_OPERAND (arg0, 1);
- right = TREE_OPERAND (arg1, 1);
- }
- else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
- {
- common = TREE_OPERAND (arg0, 0);
- left = TREE_OPERAND (arg0, 1);
- right = TREE_OPERAND (arg1, 0);
- }
- else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
- {
- common = TREE_OPERAND (arg0, 1);
- left = TREE_OPERAND (arg0, 0);
- right = TREE_OPERAND (arg1, 1);
- }
- else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
- {
- common = TREE_OPERAND (arg0, 1);
- left = TREE_OPERAND (arg0, 0);
- right = TREE_OPERAND (arg1, 0);
- }
- else
- return 0;
-
- common = fold_convert_loc (loc, type, common);
- left = fold_convert_loc (loc, type, left);
- right = fold_convert_loc (loc, type, right);
- return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
- fold_build2_loc (loc, code, type, left, right));
-}
-
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
with code CODE. This optimization is unsafe. */
static tree
tree type = TREE_TYPE (lhs);
tree unsigned_type;
int const_p = TREE_CODE (rhs) == INTEGER_CST;
- enum machine_mode lmode, rmode, nmode;
+ machine_mode lmode, rmode, nmode;
int lunsignedp, runsignedp;
int lvolatilep = 0, rvolatilep = 0;
tree linner, rinner = NULL_TREE;
static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
- HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
+ HOST_WIDE_INT *pbitpos, machine_mode *pmode,
int *punsignedp, int *pvolatilep,
tree *pmask, tree *pand_mask)
{
The return value is the (sub)expression whose sign bit is VAL,
or NULL_TREE otherwise. */
-static tree
+tree
sign_bit_p (tree exp, const_tree val)
{
int width;
{
tree etype = TREE_TYPE (exp), value;
-#ifdef HAVE_canonicalize_funcptr_for_compare
/* Disable this optimization for function pointer expressions
on targets that require function pointer canonicalization. */
- if (HAVE_canonicalize_funcptr_for_compare
+ if (targetm.have_canonicalize_funcptr_for_compare ()
&& TREE_CODE (etype) == POINTER_TYPE
&& TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
return NULL_TREE;
-#endif
if (! in_p)
{
Note that all these transformations are correct if A is
NaN, since the two alternatives (A and -A) are also NaNs. */
- if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
+ if (!HONOR_SIGNED_ZEROS (element_mode (type))
&& (FLOAT_TYPE_P (TREE_TYPE (arg01))
? real_zerop (arg01)
: integer_zerop (arg01))
both transformations are correct when A is NaN: A != 0
is then true, and A == 0 is false. */
- if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
+ if (!HONOR_SIGNED_ZEROS (element_mode (type))
&& integer_zerop (arg01) && integer_zerop (arg2))
{
if (comp_code == NE_EXPR)
a number and A is not. The conditions in the original
expressions will be false, so all four give B. The min()
and max() versions would give a NaN instead. */
- if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
+ if (!HONOR_SIGNED_ZEROS (element_mode (type))
&& operand_equal_for_comparison_p (arg01, arg2, arg00)
/* Avoid these transformations if the COND_EXPR may be used
as an lvalue in the C++ front-end. PR c++/19199. */
&& (in_gimple_form
|| VECTOR_TYPE_P (type)
- || (strcmp (lang_hooks.name, "GNU C++") != 0
+ || (! lang_GNU_CXX ()
&& strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
|| ! maybe_lvalue_p (arg1)
|| ! maybe_lvalue_p (arg2)))
operand which will be used if they are equal first
so that we can convert this back to the
corresponding COND_EXPR. */
- if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
+ if (!HONOR_NANS (arg1))
{
comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
case GT_EXPR:
case UNGE_EXPR:
case UNGT_EXPR:
- if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
+ if (!HONOR_NANS (arg1))
{
comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
}
break;
case UNEQ_EXPR:
- if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
+ if (!HONOR_NANS (arg1))
return pedantic_non_lvalue_loc (loc,
fold_convert_loc (loc, type, arg2));
break;
case LTGT_EXPR:
- if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
+ if (!HONOR_NANS (arg1))
return pedantic_non_lvalue_loc (loc,
fold_convert_loc (loc, type, arg1));
break;
}
}
- inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
+ inv_code = invert_tree_comparison (code, HONOR_NANS (type));
if (inv_code == rhs_code
&& operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
&& operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
- enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
- enum machine_mode lnmode, rnmode;
+ machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
+ machine_mode lnmode, rnmode;
tree ll_mask, lr_mask, rl_mask, rr_mask;
tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
tree l_const, r_const;
|| EXPRESSION_CLASS_P (op0))
/* ... and has wrapping overflow, and its type is smaller
than ctype, then we cannot pass through as widening. */
- && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
+ && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
+ && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
&& (TYPE_PRECISION (ctype)
> TYPE_PRECISION (TREE_TYPE (op0))))
/* ... or this is a truncation (t is narrower than op0),
/* ... or has undefined overflow while the converted to
type has not, we cannot do the operation in the inner type
as that would introduce undefined overflow. */
- || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
+ || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
+ && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
&& !TYPE_OVERFLOW_UNDEFINED (type))))
break;
/* If OP1 was not easily negatable, the constant may be OP0. */
if (TREE_CODE (op0) == INTEGER_CST)
{
- tree tem = op0;
- op0 = op1;
- op1 = tem;
- tem = t1;
- t1 = t2;
- t2 = tem;
+ std::swap (op0, op1);
+ std::swap (t1, t2);
}
}
return false;
/* Don't allow the fold with -fsignaling-nans. */
- if (HONOR_SNANS (TYPE_MODE (type)))
+ if (HONOR_SNANS (element_mode (type)))
return false;
/* Allow the fold if zeros aren't signed, or their sign isn't important. */
- if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
+ if (!HONOR_SIGNED_ZEROS (element_mode (type)))
return true;
/* In a vector or complex, we would need to check the sign of all zeros. */
In this situation, there is only one case we can return true for.
X - 0 is the same as X unless rounding towards -infinity is
supported. */
- return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
-}
-
-/* Subroutine of fold() that checks comparisons of built-in math
- functions against real constants.
-
- FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
- operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
- is the type of the result and ARG0 and ARG1 are the operands of the
- comparison. ARG1 must be a TREE_REAL_CST.
-
- The function returns the constant folded tree if a simplification
- can be made, and NULL_TREE otherwise. */
-
-static tree
-fold_mathfn_compare (location_t loc,
- enum built_in_function fcode, enum tree_code code,
- tree type, tree arg0, tree arg1)
-{
- REAL_VALUE_TYPE c;
-
- if (BUILTIN_SQRT_P (fcode))
- {
- tree arg = CALL_EXPR_ARG (arg0, 0);
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
-
- c = TREE_REAL_CST (arg1);
- if (REAL_VALUE_NEGATIVE (c))
- {
- /* sqrt(x) < y is always false, if y is negative. */
- if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
- return omit_one_operand_loc (loc, type, integer_zero_node, arg);
-
- /* sqrt(x) > y is always true, if y is negative and we
- don't care about NaNs, i.e. negative values of x. */
- if (code == NE_EXPR || !HONOR_NANS (mode))
- return omit_one_operand_loc (loc, type, integer_one_node, arg);
-
- /* sqrt(x) > y is the same as x >= 0, if y is negative. */
- return fold_build2_loc (loc, GE_EXPR, type, arg,
- build_real (TREE_TYPE (arg), dconst0));
- }
- else if (code == GT_EXPR || code == GE_EXPR)
- {
- REAL_VALUE_TYPE c2;
-
- REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
- real_convert (&c2, mode, &c2);
-
- if (REAL_VALUE_ISINF (c2))
- {
- /* sqrt(x) > y is x == +Inf, when y is very large. */
- if (HONOR_INFINITIES (mode))
- return fold_build2_loc (loc, EQ_EXPR, type, arg,
- build_real (TREE_TYPE (arg), c2));
-
- /* sqrt(x) > y is always false, when y is very large
- and we don't care about infinities. */
- return omit_one_operand_loc (loc, type, integer_zero_node, arg);
- }
-
- /* sqrt(x) > c is the same as x > c*c. */
- return fold_build2_loc (loc, code, type, arg,
- build_real (TREE_TYPE (arg), c2));
- }
- else if (code == LT_EXPR || code == LE_EXPR)
- {
- REAL_VALUE_TYPE c2;
-
- REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
- real_convert (&c2, mode, &c2);
-
- if (REAL_VALUE_ISINF (c2))
- {
- /* sqrt(x) < y is always true, when y is a very large
- value and we don't care about NaNs or Infinities. */
- if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
- return omit_one_operand_loc (loc, type, integer_one_node, arg);
-
- /* sqrt(x) < y is x != +Inf when y is very large and we
- don't care about NaNs. */
- if (! HONOR_NANS (mode))
- return fold_build2_loc (loc, NE_EXPR, type, arg,
- build_real (TREE_TYPE (arg), c2));
-
- /* sqrt(x) < y is x >= 0 when y is very large and we
- don't care about Infinities. */
- if (! HONOR_INFINITIES (mode))
- return fold_build2_loc (loc, GE_EXPR, type, arg,
- build_real (TREE_TYPE (arg), dconst0));
-
- /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
- arg = save_expr (arg);
- return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
- fold_build2_loc (loc, GE_EXPR, type, arg,
- build_real (TREE_TYPE (arg),
- dconst0)),
- fold_build2_loc (loc, NE_EXPR, type, arg,
- build_real (TREE_TYPE (arg),
- c2)));
- }
-
- /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
- if (! HONOR_NANS (mode))
- return fold_build2_loc (loc, code, type, arg,
- build_real (TREE_TYPE (arg), c2));
-
- /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
- arg = save_expr (arg);
- return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
- fold_build2_loc (loc, GE_EXPR, type, arg,
- build_real (TREE_TYPE (arg),
- dconst0)),
- fold_build2_loc (loc, code, type, arg,
- build_real (TREE_TYPE (arg),
- c2)));
- }
- }
-
- return NULL_TREE;
-}
-
-/* Subroutine of fold() that optimizes comparisons against Infinities,
- either +Inf or -Inf.
-
- CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
- GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
- are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
-
- The function returns the constant folded tree if a simplification
- can be made, and NULL_TREE otherwise. */
-
-static tree
-fold_inf_compare (location_t loc, enum tree_code code, tree type,
- tree arg0, tree arg1)
-{
- enum machine_mode mode;
- REAL_VALUE_TYPE max;
- tree temp;
- bool neg;
-
- mode = TYPE_MODE (TREE_TYPE (arg0));
-
- /* For negative infinity swap the sense of the comparison. */
- neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
- if (neg)
- code = swap_tree_comparison (code);
-
- switch (code)
- {
- case GT_EXPR:
- /* x > +Inf is always false, if with ignore sNANs. */
- if (HONOR_SNANS (mode))
- return NULL_TREE;
- return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
-
- case LE_EXPR:
- /* x <= +Inf is always true, if we don't case about NaNs. */
- if (! HONOR_NANS (mode))
- return omit_one_operand_loc (loc, type, integer_one_node, arg0);
-
- /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
- arg0 = save_expr (arg0);
- return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
-
- case EQ_EXPR:
- case GE_EXPR:
- /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
- real_maxval (&max, neg, mode);
- return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
- arg0, build_real (TREE_TYPE (arg0), max));
-
- case LT_EXPR:
- /* x < +Inf is always equal to x <= DBL_MAX. */
- real_maxval (&max, neg, mode);
- return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
- arg0, build_real (TREE_TYPE (arg0), max));
-
- case NE_EXPR:
- /* x != +Inf is always equal to !(x > DBL_MAX). */
- real_maxval (&max, neg, mode);
- if (! HONOR_NANS (mode))
- return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
- arg0, build_real (TREE_TYPE (arg0), max));
-
- temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
- arg0, build_real (TREE_TYPE (arg0), max));
- return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
-
- default:
- break;
- }
-
- return NULL_TREE;
+ return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
}
/* Subroutine of fold() that optimizes comparisons of a division by
tree inner = TREE_OPERAND (arg0, 0);
tree type = TREE_TYPE (arg0);
int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
- enum machine_mode operand_mode = TYPE_MODE (type);
+ machine_mode operand_mode = TYPE_MODE (type);
int ops_unsigned;
tree signed_type, unsigned_type, intermediate_type;
tree tem, one;
/* If we are going to be able to omit the AND below, we must do our
operations as unsigned. If we must use the AND, we have a choice.
Normally unsigned is faster, but for some machines signed is. */
-#ifdef LOAD_EXTEND_OP
ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
&& !flag_syntax_only) ? 0 : 1;
-#else
- ops_unsigned = 1;
-#endif
signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
return 0;
}
-/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
- ARG0 is extended to a wider type. */
-
-static tree
-fold_widened_comparison (location_t loc, enum tree_code code,
- tree type, tree arg0, tree arg1)
-{
- tree arg0_unw = get_unwidened (arg0, NULL_TREE);
- tree arg1_unw;
- tree shorter_type, outer_type;
- tree min, max;
- bool above, below;
-
- if (arg0_unw == arg0)
- return NULL_TREE;
- shorter_type = TREE_TYPE (arg0_unw);
-
-#ifdef HAVE_canonicalize_funcptr_for_compare
- /* Disable this optimization if we're casting a function pointer
- type on targets that require function pointer canonicalization. */
- if (HAVE_canonicalize_funcptr_for_compare
- && TREE_CODE (shorter_type) == POINTER_TYPE
- && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
- return NULL_TREE;
-#endif
-
- if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
- return NULL_TREE;
-
- arg1_unw = get_unwidened (arg1, NULL_TREE);
-
- /* If possible, express the comparison in the shorter mode. */
- if ((code == EQ_EXPR || code == NE_EXPR
- || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
- && (TREE_TYPE (arg1_unw) == shorter_type
- || ((TYPE_PRECISION (shorter_type)
- >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
- && (TYPE_UNSIGNED (shorter_type)
- == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
- || (TREE_CODE (arg1_unw) == INTEGER_CST
- && (TREE_CODE (shorter_type) == INTEGER_TYPE
- || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
- && int_fits_type_p (arg1_unw, shorter_type))))
- return fold_build2_loc (loc, code, type, arg0_unw,
- fold_convert_loc (loc, shorter_type, arg1_unw));
-
- if (TREE_CODE (arg1_unw) != INTEGER_CST
- || TREE_CODE (shorter_type) != INTEGER_TYPE
- || !int_fits_type_p (arg1_unw, shorter_type))
- return NULL_TREE;
-
- /* If we are comparing with the integer that does not fit into the range
- of the shorter type, the result is known. */
- outer_type = TREE_TYPE (arg1_unw);
- min = lower_bound_in_type (outer_type, shorter_type);
- max = upper_bound_in_type (outer_type, shorter_type);
-
- above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
- max, arg1_unw));
- below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
- arg1_unw, min));
-
- switch (code)
- {
- case EQ_EXPR:
- if (above || below)
- return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
- break;
-
- case NE_EXPR:
- if (above || below)
- return omit_one_operand_loc (loc, type, integer_one_node, arg0);
- break;
-
- case LT_EXPR:
- case LE_EXPR:
- if (above)
- return omit_one_operand_loc (loc, type, integer_one_node, arg0);
- else if (below)
- return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
-
- case GT_EXPR:
- case GE_EXPR:
- if (above)
- return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
- else if (below)
- return omit_one_operand_loc (loc, type, integer_one_node, arg0);
-
- default:
- break;
- }
-
- return NULL_TREE;
-}
-
-/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
- ARG0 just the signedness is changed. */
-
-static tree
-fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
- tree arg0, tree arg1)
-{
- tree arg0_inner;
- tree inner_type, outer_type;
-
- if (!CONVERT_EXPR_P (arg0))
- return NULL_TREE;
-
- outer_type = TREE_TYPE (arg0);
- arg0_inner = TREE_OPERAND (arg0, 0);
- inner_type = TREE_TYPE (arg0_inner);
-
-#ifdef HAVE_canonicalize_funcptr_for_compare
- /* Disable this optimization if we're casting a function pointer
- type on targets that require function pointer canonicalization. */
- if (HAVE_canonicalize_funcptr_for_compare
- && TREE_CODE (inner_type) == POINTER_TYPE
- && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
- return NULL_TREE;
-#endif
-
- if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
- return NULL_TREE;
-
- if (TREE_CODE (arg1) != INTEGER_CST
- && !(CONVERT_EXPR_P (arg1)
- && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
- return NULL_TREE;
-
- if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
- && code != NE_EXPR
- && code != EQ_EXPR)
- return NULL_TREE;
-
- if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
- return NULL_TREE;
-
- if (TREE_CODE (arg1) == INTEGER_CST)
- arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
- TREE_OVERFLOW (arg1));
- else
- arg1 = fold_convert_loc (loc, inner_type, arg1);
-
- return fold_build2_loc (loc, code, type, arg0_inner, arg1);
-}
-
/* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
means A >= Y && A != MAX, but in this case we know that
native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
{
tree type = TREE_TYPE (expr);
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
int total_bytes = GET_MODE_SIZE (mode);
FIXED_VALUE_TYPE value;
tree i_value, i_type;
static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
- enum machine_mode mode = TYPE_MODE (type);
+ machine_mode mode = TYPE_MODE (type);
int total_bytes = GET_MODE_SIZE (mode);
int byte, offset, word, words, bitpos;
unsigned char value;
return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}
-static bool vec_cst_ctor_to_array (tree, tree *);
-
/* Fold a unary expression of code CODE and type TYPE with operand
OP0. Return the folded expression if folding is successful.
Otherwise, return NULL_TREE. */
constant folder. */
STRIP_NOPS (arg0);
}
+
+ if (CONSTANT_CLASS_P (arg0))
+ {
+ tree tem = const_unop (code, type, arg0);
+ if (tem)
+ {
+ if (TREE_TYPE (tem) != type)
+ tem = fold_convert_loc (loc, type, tem);
+ return tem;
+ }
+ }
}
+ tem = generic_simplify (loc, code, type, op0);
+ if (tem)
+ return tem;
+
if (TREE_CODE_CLASS (code) == tcc_unary)
{
if (TREE_CODE (arg0) == COMPOUND_EXPR)
switch (code)
{
- case PAREN_EXPR:
- /* Re-association barriers around constants and other re-association
- barriers can be removed. */
- if (CONSTANT_CLASS_P (op0)
- || TREE_CODE (op0) == PAREN_EXPR)
- return fold_convert_loc (loc, type, op0);
- return NULL_TREE;
-
case NON_LVALUE_EXPR:
if (!maybe_lvalue_p (op0))
return fold_convert_loc (loc, type, op0);
CASE_CONVERT:
case FLOAT_EXPR:
case FIX_TRUNC_EXPR:
- if (TREE_TYPE (op0) == type)
- return op0;
-
if (COMPARISON_CLASS_P (op0))
{
/* If we have (type) (a CMP b) and type is an integral type, return
constant_boolean_node (false, type));
}
- /* Handle cases of two conversions in a row. */
- if (CONVERT_EXPR_P (op0))
- {
- tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
- tree inter_type = TREE_TYPE (op0);
- int inside_int = INTEGRAL_TYPE_P (inside_type);
- int inside_ptr = POINTER_TYPE_P (inside_type);
- int inside_float = FLOAT_TYPE_P (inside_type);
- int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
- unsigned int inside_prec = TYPE_PRECISION (inside_type);
- int inside_unsignedp = TYPE_UNSIGNED (inside_type);
- int inter_int = INTEGRAL_TYPE_P (inter_type);
- int inter_ptr = POINTER_TYPE_P (inter_type);
- int inter_float = FLOAT_TYPE_P (inter_type);
- int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
- unsigned int inter_prec = TYPE_PRECISION (inter_type);
- int inter_unsignedp = TYPE_UNSIGNED (inter_type);
- int final_int = INTEGRAL_TYPE_P (type);
- int final_ptr = POINTER_TYPE_P (type);
- int final_float = FLOAT_TYPE_P (type);
- int final_vec = TREE_CODE (type) == VECTOR_TYPE;
- unsigned int final_prec = TYPE_PRECISION (type);
- int final_unsignedp = TYPE_UNSIGNED (type);
-
- /* In addition to the cases of two conversions in a row
- handled below, if we are converting something to its own
- type via an object of identical or wider precision, neither
- conversion is needed. */
- if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
- && (((inter_int || inter_ptr) && final_int)
- || (inter_float && final_float))
- && inter_prec >= final_prec)
- return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
-
- /* Likewise, if the intermediate and initial types are either both
- float or both integer, we don't need the middle conversion if the
- former is wider than the latter and doesn't change the signedness
- (for integers). Avoid this if the final type is a pointer since
- then we sometimes need the middle conversion. Likewise if the
- final type has a precision not equal to the size of its mode. */
- if (((inter_int && inside_int)
- || (inter_float && inside_float)
- || (inter_vec && inside_vec))
- && inter_prec >= inside_prec
- && (inter_float || inter_vec
- || inter_unsignedp == inside_unsignedp)
- && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
- && TYPE_MODE (type) == TYPE_MODE (inter_type))
- && ! final_ptr
- && (! final_vec || inter_prec == inside_prec))
- return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
-
- /* If we have a sign-extension of a zero-extended value, we can
- replace that by a single zero-extension. Likewise if the
- final conversion does not change precision we can drop the
- intermediate conversion. */
- if (inside_int && inter_int && final_int
- && ((inside_prec < inter_prec && inter_prec < final_prec
- && inside_unsignedp && !inter_unsignedp)
- || final_prec == inter_prec))
- return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
-
- /* Two conversions in a row are not needed unless:
- - some conversion is floating-point (overstrict for now), or
- - some conversion is a vector (overstrict for now), or
- - the intermediate type is narrower than both initial and
- final, or
- - the intermediate type and innermost type differ in signedness,
- and the outermost type is wider than the intermediate, or
- - the initial type is a pointer type and the precisions of the
- intermediate and final types differ, or
- - the final type is a pointer type and the precisions of the
- initial and intermediate types differ. */
- if (! inside_float && ! inter_float && ! final_float
- && ! inside_vec && ! inter_vec && ! final_vec
- && (inter_prec >= inside_prec || inter_prec >= final_prec)
- && ! (inside_int && inter_int
- && inter_unsignedp != inside_unsignedp
- && inter_prec < final_prec)
- && ((inter_unsignedp && inter_prec > inside_prec)
- == (final_unsignedp && final_prec > inter_prec))
- && ! (inside_ptr && inter_prec != final_prec)
- && ! (final_ptr && inside_prec != inter_prec)
- && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
- && TYPE_MODE (type) == TYPE_MODE (inter_type)))
- return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
- }
-
/* Handle (T *)&A.B.C for A being of type T and B and C
living at offset zero. This occurs frequently in
C++ upcasting and then accessing the base. */
{
HOST_WIDE_INT bitsize, bitpos;
tree offset;
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp, volatilep;
tree base = TREE_OPERAND (op0, 0);
base = get_inner_reference (base, &bitsize, &bitpos, &offset,
cst &= HOST_WIDE_INT_M1U
<< (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
change = (cst == 0);
-#ifdef LOAD_EXTEND_OP
if (change
&& !flag_syntax_only
&& (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
and0 = fold_convert_loc (loc, uns, and0);
and1 = fold_convert_loc (loc, uns, and1);
}
-#endif
}
if (change)
{
}
}
- tem = fold_convert_const (code, type, arg0);
- return tem ? tem : NULL_TREE;
-
- case ADDR_SPACE_CONVERT_EXPR:
- if (integer_zerop (arg0))
- return fold_convert_const (code, type, arg0);
return NULL_TREE;
- case FIXED_CONVERT_EXPR:
- tem = fold_convert_const (code, type, arg0);
- return tem ? tem : NULL_TREE;
-
case VIEW_CONVERT_EXPR:
- if (TREE_TYPE (op0) == type)
- return op0;
- if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
- return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
- type, TREE_OPERAND (op0, 0));
if (TREE_CODE (op0) == MEM_REF)
return fold_build2_loc (loc, MEM_REF, type,
TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
- /* For integral conversions with the same precision or pointer
- conversions use a NOP_EXPR instead. */
- if ((INTEGRAL_TYPE_P (type)
- || POINTER_TYPE_P (type))
- && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
- || POINTER_TYPE_P (TREE_TYPE (op0)))
- && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
- return fold_convert_loc (loc, type, op0);
-
- /* Strip inner integral conversions that do not change the precision. */
- if (CONVERT_EXPR_P (op0)
- && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
- || POINTER_TYPE_P (TREE_TYPE (op0)))
- && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
- || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
- && (TYPE_PRECISION (TREE_TYPE (op0))
- == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
- return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
- type, TREE_OPERAND (op0, 0));
-
- return fold_view_convert_expr (type, op0);
+ return NULL_TREE;
case NEGATE_EXPR:
tem = fold_negate_expr (loc, arg0);
return NULL_TREE;
case ABS_EXPR:
- if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
- return fold_abs_const (arg0, type);
- else if (TREE_CODE (arg0) == NEGATE_EXPR)
- return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
/* Convert fabs((double)float) into (double)fabsf(float). */
- else if (TREE_CODE (arg0) == NOP_EXPR
- && TREE_CODE (type) == REAL_TYPE)
+ if (TREE_CODE (arg0) == NOP_EXPR
+ && TREE_CODE (type) == REAL_TYPE)
{
tree targ0 = strip_float_extensions (arg0);
if (targ0 != arg0)
TREE_TYPE (targ0),
targ0));
}
- /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
- else if (TREE_CODE (arg0) == ABS_EXPR)
- return arg0;
- else if (tree_expr_nonnegative_p (arg0))
- return arg0;
/* Strip sign ops from argument. */
if (TREE_CODE (type) == REAL_TYPE)
return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
negate_expr (ipart));
}
- if (TREE_CODE (arg0) == COMPLEX_CST)
- {
- tree itype = TREE_TYPE (type);
- tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
- tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
- return build_complex (type, rpart, negate_expr (ipart));
- }
if (TREE_CODE (arg0) == CONJ_EXPR)
return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
return NULL_TREE;
case BIT_NOT_EXPR:
- if (TREE_CODE (arg0) == INTEGER_CST)
- return fold_not_const (arg0, type);
- else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
- return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
- /* Convert ~ (-A) to A - 1. */
- else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
- return fold_build2_loc (loc, MINUS_EXPR, type,
- fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
- build_int_cst (type, 1));
- /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
- else if (INTEGRAL_TYPE_P (type)
- && ((TREE_CODE (arg0) == MINUS_EXPR
- && integer_onep (TREE_OPERAND (arg0, 1)))
- || (TREE_CODE (arg0) == PLUS_EXPR
- && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
- return fold_build1_loc (loc, NEGATE_EXPR, type,
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg0, 0)));
/* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
- else if (TREE_CODE (arg0) == BIT_XOR_EXPR
- && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg0, 0)))))
+ if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0, 0)))))
return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg0, 1)));
+ fold_convert_loc (loc, type,
+ TREE_OPERAND (arg0, 1)));
else if (TREE_CODE (arg0) == BIT_XOR_EXPR
&& (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
fold_convert_loc (loc, type,
return fold_build2_loc (loc, BIT_XOR_EXPR, type,
fold_convert_loc (loc, type,
TREE_OPERAND (arg0, 0)), tem);
- /* Perform BIT_NOT_EXPR on each element individually. */
- else if (TREE_CODE (arg0) == VECTOR_CST)
- {
- tree *elements;
- tree elem;
- unsigned count = VECTOR_CST_NELTS (arg0), i;
-
- elements = XALLOCAVEC (tree, count);
- for (i = 0; i < count; i++)
- {
- elem = VECTOR_CST_ELT (arg0, i);
- elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
- if (elem == NULL_TREE)
- break;
- elements[i] = elem;
- }
- if (i == count)
- return build_vector (type, elements);
- }
- else if (COMPARISON_CLASS_P (arg0)
- && (VECTOR_TYPE_P (type)
- || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
- {
- tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
- enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
- HONOR_NANS (TYPE_MODE (op_type)));
- if (subcode != ERROR_MARK)
- return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg0, 1));
- }
-
return NULL_TREE;
case REALPART_EXPR:
if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
return fold_convert_loc (loc, type, arg0);
- if (TREE_CODE (arg0) == COMPLEX_EXPR)
- return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg0, 1));
- if (TREE_CODE (arg0) == COMPLEX_CST)
- return fold_convert_loc (loc, type, TREE_REALPART (arg0));
if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
{
tree itype = TREE_TYPE (TREE_TYPE (arg0));
case IMAGPART_EXPR:
if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
return build_zero_cst (type);
- if (TREE_CODE (arg0) == COMPLEX_EXPR)
- return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg0, 0));
- if (TREE_CODE (arg0) == COMPLEX_CST)
- return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
{
tree itype = TREE_TYPE (TREE_TYPE (arg0));
}
return NULL_TREE;
- case VEC_UNPACK_LO_EXPR:
- case VEC_UNPACK_HI_EXPR:
- case VEC_UNPACK_FLOAT_LO_EXPR:
- case VEC_UNPACK_FLOAT_HI_EXPR:
- {
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
- tree *elts;
- enum tree_code subcode;
-
- gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
- if (TREE_CODE (arg0) != VECTOR_CST)
- return NULL_TREE;
-
- elts = XALLOCAVEC (tree, nelts * 2);
- if (!vec_cst_ctor_to_array (arg0, elts))
- return NULL_TREE;
-
- if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
- || code == VEC_UNPACK_FLOAT_LO_EXPR))
- elts += nelts;
-
- if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
- subcode = NOP_EXPR;
- else
- subcode = FLOAT_EXPR;
-
- for (i = 0; i < nelts; i++)
- {
- elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
- if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
- return NULL_TREE;
- }
-
- return build_vector (type, elts);
- }
-
- case REDUC_MIN_EXPR:
- case REDUC_MAX_EXPR:
- case REDUC_PLUS_EXPR:
- {
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
- tree *elts;
- enum tree_code subcode;
-
- if (TREE_CODE (op0) != VECTOR_CST)
- return NULL_TREE;
-
- elts = XALLOCAVEC (tree, nelts);
- if (!vec_cst_ctor_to_array (op0, elts))
- return NULL_TREE;
-
- switch (code)
- {
- case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
- case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
- case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
- default: gcc_unreachable ();
- }
-
- for (i = 1; i < nelts; i++)
- {
- elts[0] = const_binop (subcode, elts[0], elts[i]);
- if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
- return NULL_TREE;
- elts[i] = build_zero_cst (TREE_TYPE (type));
- }
-
- return build_vector (type, elts);
- }
-
default:
return NULL_TREE;
} /* switch (code) */
enum tree_code code0 = TREE_CODE (arg0);
tree t, cst0 = NULL_TREE;
int sgn0;
- bool swap = false;
-
- /* Match A +- CST code arg1 and CST code arg1. We can change the
- first form only if overflow is undefined. */
- if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
- /* In principle pointers also have undefined overflow behavior,
- but that causes problems elsewhere. */
- && !POINTER_TYPE_P (TREE_TYPE (arg0))
- && (code0 == MINUS_EXPR
- || code0 == PLUS_EXPR)
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
- || code0 == INTEGER_CST))
+
+ /* Match A +- CST code arg1. We can change this only if overflow
+ is undefined. */
+ if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
+ && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
+ /* In principle pointers also have undefined overflow behavior,
+ but that causes problems elsewhere. */
+ && !POINTER_TYPE_P (TREE_TYPE (arg0))
+ && (code0 == MINUS_EXPR
+ || code0 == PLUS_EXPR)
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
return NULL_TREE;
/* Identify the constant in arg0 and its sign. */
- if (code0 == INTEGER_CST)
- cst0 = arg0;
- else
- cst0 = TREE_OPERAND (arg0, 1);
+ cst0 = TREE_OPERAND (arg0, 1);
sgn0 = tree_int_cst_sgn (cst0);
/* Overflowed constants and zero will cause problems. */
/* See if we can reduce the magnitude of the constant in
arg0 by changing the comparison code. */
- if (code0 == INTEGER_CST)
- {
- /* CST <= arg1 -> CST-1 < arg1. */
- if (code == LE_EXPR && sgn0 == 1)
- code = LT_EXPR;
- /* -CST < arg1 -> -CST-1 <= arg1. */
- else if (code == LT_EXPR && sgn0 == -1)
- code = LE_EXPR;
- /* CST > arg1 -> CST-1 >= arg1. */
- else if (code == GT_EXPR && sgn0 == 1)
- code = GE_EXPR;
- /* -CST >= arg1 -> -CST-1 > arg1. */
- else if (code == GE_EXPR && sgn0 == -1)
- code = GT_EXPR;
- else
- return NULL_TREE;
- /* arg1 code' CST' might be more canonical. */
- swap = true;
- }
+ /* A - CST < arg1 -> A - CST-1 <= arg1. */
+ if (code == LT_EXPR
+ && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
+ code = LE_EXPR;
+ /* A + CST > arg1 -> A + CST-1 >= arg1. */
+ else if (code == GT_EXPR
+ && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
+ code = GE_EXPR;
+ /* A + CST <= arg1 -> A + CST-1 < arg1. */
+ else if (code == LE_EXPR
+ && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
+ code = LT_EXPR;
+ /* A - CST >= arg1 -> A - CST-1 > arg1. */
+ else if (code == GE_EXPR
+ && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
+ code = GT_EXPR;
else
- {
- /* A - CST < arg1 -> A - CST-1 <= arg1. */
- if (code == LT_EXPR
- && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
- code = LE_EXPR;
- /* A + CST > arg1 -> A + CST-1 >= arg1. */
- else if (code == GT_EXPR
- && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
- code = GE_EXPR;
- /* A + CST <= arg1 -> A + CST-1 < arg1. */
- else if (code == LE_EXPR
- && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
- code = LT_EXPR;
- /* A - CST >= arg1 -> A - CST-1 > arg1. */
- else if (code == GE_EXPR
- && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
- code = GT_EXPR;
- else
- return NULL_TREE;
- *strict_overflow_p = true;
- }
+ return NULL_TREE;
+ *strict_overflow_p = true;
/* Now build the constant reduced in magnitude. But not if that
would produce one outside of its types range. */
|| (sgn0 == -1
&& TYPE_MAX_VALUE (TREE_TYPE (cst0))
&& tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
- /* We cannot swap the comparison here as that would cause us to
- endlessly recurse. */
return NULL_TREE;
t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
cst0, build_int_cst (TREE_TYPE (cst0), 1));
- if (code0 != INTEGER_CST)
- t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
+ t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
t = fold_convert (TREE_TYPE (arg1), t);
- /* If swapping might yield to a more canonical form, do so. */
- if (swap)
- return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
- else
- return fold_build2_loc (loc, code, type, t, arg1);
+ return fold_build2_loc (loc, code, type, t, arg1);
}
/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
STRIP_SIGN_NOPS (arg0);
STRIP_SIGN_NOPS (arg1);
- tem = fold_relational_const (code, type, arg0, arg1);
- if (tem != NULL_TREE)
- return tem;
-
- /* If one arg is a real or integer constant, put it last. */
- if (tree_swap_operands_p (arg0, arg1, true))
- return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
-
/* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
- && (equality_code || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
+ && (equality_code
+ || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
+ && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
&& !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
&& TREE_CODE (arg1) == INTEGER_CST
/* If the constant operation overflowed this can be
simplified as a comparison against INT_MAX/INT_MIN. */
- if (TREE_OVERFLOW (new_const))
+ if (TREE_OVERFLOW (new_const)
+ && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
{
int const1_sgn = tree_int_cst_sgn (const1);
enum tree_code code2 = code;
}
}
- /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
- if (TREE_CODE (arg0) == MINUS_EXPR
- && equality_code
- && integer_zerop (arg1))
- {
- /* ??? The transformation is valid for the other operators if overflow
- is undefined for the type, but performing it here badly interacts
- with the transformation in fold_cond_expr_with_comparison which
- attempts to synthetize ABS_EXPR. */
- if (!equality_code)
- fold_overflow_warning ("assuming signed overflow does not occur "
- "when changing X - Y cmp 0 to X cmp Y",
- WARN_STRICT_OVERFLOW_COMPARISON);
- return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg0, 1));
- }
-
/* For comparisons of pointers we can decompose it to a compile time
comparison of the base objects and the offsets into the object.
This requires at least one operand being an ADDR_EXPR or a
{
tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
- enum machine_mode mode;
+ machine_mode mode;
int volatilep, unsignedp;
bool indirect_base0 = false, indirect_base1 = false;
}
}
- /* A local variable can never be pointed to by
- the default SSA name of an incoming parameter. */
- if ((TREE_CODE (arg0) == ADDR_EXPR
- && indirect_base0
- && TREE_CODE (base0) == VAR_DECL
- && auto_var_in_fn_p (base0, current_function_decl)
- && !indirect_base1
- && TREE_CODE (base1) == SSA_NAME
- && SSA_NAME_IS_DEFAULT_DEF (base1)
- && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
- || (TREE_CODE (arg1) == ADDR_EXPR
- && indirect_base1
- && TREE_CODE (base1) == VAR_DECL
- && auto_var_in_fn_p (base1, current_function_decl)
- && !indirect_base0
- && TREE_CODE (base0) == SSA_NAME
- && SSA_NAME_IS_DEFAULT_DEF (base0)
- && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
- {
- if (code == NE_EXPR)
- return constant_boolean_node (1, type);
- else if (code == EQ_EXPR)
- return constant_boolean_node (0, type);
- }
/* If we have equivalent bases we might be able to simplify. */
- else if (indirect_base0 == indirect_base1
- && operand_equal_p (base0, base1, 0))
+ if (indirect_base0 == indirect_base1
+ && operand_equal_p (base0, base1, 0))
{
/* We can fold this expression to a constant if the non-constant
offset parts are equal. */
}
}
/* For non-equal bases we can simplify if they are addresses
- of local binding decls or constants. */
+ declarations with different addresses. */
else if (indirect_base0 && indirect_base1
/* We know that !operand_equal_p (base0, base1, 0)
because the if condition was false. But make
&& base0 != base1
&& TREE_CODE (arg0) == ADDR_EXPR
&& TREE_CODE (arg1) == ADDR_EXPR
- && (((TREE_CODE (base0) == VAR_DECL
- || TREE_CODE (base0) == PARM_DECL)
- && (targetm.binds_local_p (base0)
- || CONSTANT_CLASS_P (base1)))
- || CONSTANT_CLASS_P (base0))
- && (((TREE_CODE (base1) == VAR_DECL
- || TREE_CODE (base1) == PARM_DECL)
- && (targetm.binds_local_p (base1)
- || CONSTANT_CLASS_P (base0)))
- || CONSTANT_CLASS_P (base1)))
+ && DECL_P (base0)
+ && DECL_P (base1)
+ /* Watch for aliases. */
+ && (!decl_in_symtab_p (base0)
+ || !decl_in_symtab_p (base1)
+ || !symtab_node::get_create (base0)->equal_address_to
+ (symtab_node::get_create (base1))))
{
if (code == EQ_EXPR)
return omit_two_operands_loc (loc, type, boolean_false_node,
X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
the resulting offset is smaller in absolute value than the
original one and has the same sign. */
- if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
+ if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
+ && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
&& (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
&& (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
&& !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
}
}
- /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
- signed arithmetic case. That form is created by the compiler
- often enough for folding it to be of value. One example is in
- computing loop trip counts after Operator Strength Reduction. */
- if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
- && TREE_CODE (arg0) == MULT_EXPR
- && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
- && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
- && integer_zerop (arg1))
- {
- tree const1 = TREE_OPERAND (arg0, 1);
- tree const2 = arg1; /* zero */
- tree variable1 = TREE_OPERAND (arg0, 0);
- enum tree_code cmp_code = code;
-
- /* Handle unfolded multiplication by zero. */
- if (integer_zerop (const1))
- return fold_build2_loc (loc, cmp_code, type, const1, const2);
-
- fold_overflow_warning (("assuming signed overflow does not occur when "
- "eliminating multiplication in comparison "
- "with zero"),
- WARN_STRICT_OVERFLOW_COMPARISON);
-
- /* If const1 is negative we swap the sense of the comparison. */
- if (tree_int_cst_sgn (const1) < 0)
- cmp_code = swap_tree_comparison (cmp_code);
-
- return fold_build2_loc (loc, cmp_code, type, variable1, const2);
- }
-
tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
if (tem)
return tem;
- if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
- {
- tree targ0 = strip_float_extensions (arg0);
- tree targ1 = strip_float_extensions (arg1);
- tree newtype = TREE_TYPE (targ0);
-
- if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
- newtype = TREE_TYPE (targ1);
-
- /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
- if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
- return fold_build2_loc (loc, code, type,
- fold_convert_loc (loc, newtype, targ0),
- fold_convert_loc (loc, newtype, targ1));
-
- /* (-a) CMP (-b) -> b CMP a */
- if (TREE_CODE (arg0) == NEGATE_EXPR
- && TREE_CODE (arg1) == NEGATE_EXPR)
- return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
- TREE_OPERAND (arg0, 0));
-
- if (TREE_CODE (arg1) == REAL_CST)
- {
- REAL_VALUE_TYPE cst;
- cst = TREE_REAL_CST (arg1);
-
- /* (-a) CMP CST -> a swap(CMP) (-CST) */
- if (TREE_CODE (arg0) == NEGATE_EXPR)
- return fold_build2_loc (loc, swap_tree_comparison (code), type,
- TREE_OPERAND (arg0, 0),
- build_real (TREE_TYPE (arg1),
- real_value_negate (&cst)));
-
- /* IEEE doesn't distinguish +0 and -0 in comparisons. */
- /* a CMP (-0) -> a CMP 0 */
- if (REAL_VALUE_MINUS_ZERO (cst))
- return fold_build2_loc (loc, code, type, arg0,
- build_real (TREE_TYPE (arg1), dconst0));
-
- /* x != NaN is always true, other ops are always false. */
- if (REAL_VALUE_ISNAN (cst)
- && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
- {
- tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
- return omit_one_operand_loc (loc, type, tem, arg0);
- }
-
- /* Fold comparisons against infinity. */
- if (REAL_VALUE_ISINF (cst)
- && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
- {
- tem = fold_inf_compare (loc, code, type, arg0, arg1);
- if (tem != NULL_TREE)
- return tem;
- }
- }
-
- /* If this is a comparison of a real constant with a PLUS_EXPR
- or a MINUS_EXPR of a real constant, we can convert it into a
- comparison with a revised real constant as long as no overflow
- occurs when unsafe_math_optimizations are enabled. */
- if (flag_unsafe_math_optimizations
- && TREE_CODE (arg1) == REAL_CST
- && (TREE_CODE (arg0) == PLUS_EXPR
- || TREE_CODE (arg0) == MINUS_EXPR)
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
- && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
- ? MINUS_EXPR : PLUS_EXPR,
- arg1, TREE_OPERAND (arg0, 1)))
- && !TREE_OVERFLOW (tem))
- return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
-
- /* Likewise, we can simplify a comparison of a real constant with
- a MINUS_EXPR whose first operand is also a real constant, i.e.
- (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
- floating-point types only if -fassociative-math is set. */
- if (flag_associative_math
- && TREE_CODE (arg1) == REAL_CST
- && TREE_CODE (arg0) == MINUS_EXPR
- && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
- && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
- arg1))
- && !TREE_OVERFLOW (tem))
- return fold_build2_loc (loc, swap_tree_comparison (code), type,
- TREE_OPERAND (arg0, 1), tem);
-
- /* Fold comparisons against built-in math functions. */
- if (TREE_CODE (arg1) == REAL_CST
- && flag_unsafe_math_optimizations
- && ! flag_errno_math)
- {
- enum built_in_function fcode = builtin_mathfn_code (arg0);
-
- if (fcode != END_BUILTINS)
- {
- tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
- if (tem != NULL_TREE)
- return tem;
- }
- }
- }
-
- if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
- && CONVERT_EXPR_P (arg0))
- {
- /* If we are widening one operand of an integer comparison,
- see if the other operand is similarly being widened. Perhaps we
- can do the comparison in the narrower type. */
- tem = fold_widened_comparison (loc, code, type, arg0, arg1);
- if (tem)
- return tem;
-
- /* Or if we are changing signedness. */
- tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
- if (tem)
- return tem;
- }
-
/* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
constant, we can simplify it. */
if (TREE_CODE (arg1) == INTEGER_CST
return tem;
}
- /* Simplify comparison of something with itself. (For IEEE
- floating-point, we can only do some of these simplifications.) */
- if (operand_equal_p (arg0, arg1, 0))
- {
- switch (code)
- {
- case EQ_EXPR:
- if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
- || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
- return constant_boolean_node (1, type);
- break;
-
- case GE_EXPR:
- case LE_EXPR:
- if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
- || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
- return constant_boolean_node (1, type);
- return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
-
- case NE_EXPR:
- /* For NE, we can only do this simplification if integer
- or we don't honor IEEE floating point NaNs. */
- if (FLOAT_TYPE_P (TREE_TYPE (arg0))
- && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
- break;
- /* ... fall through ... */
- case GT_EXPR:
- case LT_EXPR:
- return constant_boolean_node (0, type);
- default:
- gcc_unreachable ();
- }
- }
-
/* If we are comparing an expression that just has comparisons
of two integer values, arithmetic expressions of those comparisons,
and constants, we can simplify it. There are only three cases
return tem;
}
- /* Fold ~X op ~Y as Y op X. */
- if (TREE_CODE (arg0) == BIT_NOT_EXPR
- && TREE_CODE (arg1) == BIT_NOT_EXPR)
- {
- tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
- return fold_build2_loc (loc, code, type,
- fold_convert_loc (loc, cmp_type,
- TREE_OPERAND (arg1, 0)),
- TREE_OPERAND (arg0, 0));
- }
-
- /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
- if (TREE_CODE (arg0) == BIT_NOT_EXPR
- && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
- {
- tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
- return fold_build2_loc (loc, swap_tree_comparison (code), type,
- TREE_OPERAND (arg0, 0),
- fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
- fold_convert_loc (loc, cmp_type, arg1)));
- }
-
return NULL_TREE;
}
}
-/* Subroutine of fold_binary. If P is the value of EXPR, computes
- power-of-two M and (arbitrary) N such that M divides (P-N). This condition
- guarantees that P and N have the same least significant log2(M) bits.
- N is not otherwise constrained. In particular, N is not normalized to
- 0 <= N < M as is common. In general, the precise value of P is unknown.
- M is chosen as large as possible such that constant N can be determined.
-
- Returns M and sets *RESIDUE to N.
-
- If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
- account. This is not always possible due to PR 35705.
- */
-
-static unsigned HOST_WIDE_INT
-get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
- bool allow_func_align)
-{
- enum tree_code code;
-
- *residue = 0;
-
- code = TREE_CODE (expr);
- if (code == ADDR_EXPR)
- {
- unsigned int bitalign;
- get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
- *residue /= BITS_PER_UNIT;
- return bitalign / BITS_PER_UNIT;
- }
- else if (code == POINTER_PLUS_EXPR)
- {
- tree op0, op1;
- unsigned HOST_WIDE_INT modulus;
- enum tree_code inner_code;
-
- op0 = TREE_OPERAND (expr, 0);
- STRIP_NOPS (op0);
- modulus = get_pointer_modulus_and_residue (op0, residue,
- allow_func_align);
-
- op1 = TREE_OPERAND (expr, 1);
- STRIP_NOPS (op1);
- inner_code = TREE_CODE (op1);
- if (inner_code == INTEGER_CST)
- {
- *residue += TREE_INT_CST_LOW (op1);
- return modulus;
- }
- else if (inner_code == MULT_EXPR)
- {
- op1 = TREE_OPERAND (op1, 1);
- if (TREE_CODE (op1) == INTEGER_CST)
- {
- unsigned HOST_WIDE_INT align;
-
- /* Compute the greatest power-of-2 divisor of op1. */
- align = TREE_INT_CST_LOW (op1);
- align &= -align;
-
- /* If align is non-zero and less than *modulus, replace
- *modulus with align., If align is 0, then either op1 is 0
- or the greatest power-of-2 divisor of op1 doesn't fit in an
- unsigned HOST_WIDE_INT. In either case, no additional
- constraint is imposed. */
- if (align)
- modulus = MIN (modulus, align);
-
- return modulus;
- }
- }
- }
-
- /* If we get here, we were unable to determine anything useful about the
- expression. */
- return 1;
-}
-
/* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
CONSTRUCTOR ARG into array ELTS and return true if successful. */
/* If the real or vector real constant CST of type TYPE has an exact
inverse, return it, else return NULL. */
-static tree
+tree
exact_inverse (tree type, tree cst)
{
REAL_VALUE_TYPE r;
tree unit_type, *elts;
- enum machine_mode mode;
+ machine_mode mode;
unsigned vec_nelts, i;
switch (TREE_CODE (cst))
/* Note that TREE_CONSTANT isn't enough: static var addresses are
constant but we can't do arithmetic on them. */
- if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
- || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
- || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
- || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
- || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
- || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
- || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
+ if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
{
- if (kind == tcc_binary)
- {
- /* Make sure type and arg0 have the same saturating flag. */
- gcc_assert (TYPE_SATURATING (type)
- == TYPE_SATURATING (TREE_TYPE (arg0)));
- tem = const_binop (code, arg0, arg1);
- }
- else if (kind == tcc_comparison)
- tem = fold_relational_const (code, type, arg0, arg1);
- else
- tem = NULL_TREE;
-
+ tem = const_binop (code, type, arg0, arg1);
if (tem != NULL_TREE)
{
if (TREE_TYPE (tem) != type)
&& tree_swap_operands_p (arg0, arg1, true))
return fold_build2_loc (loc, code, type, op1, op0);
+ /* Likewise if this is a comparison, and ARG0 is a constant, move it
+ to ARG1 to reduce the number of tests below. */
+ if (kind == tcc_comparison
+ && tree_swap_operands_p (arg0, arg1, true))
+ return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
+
+ tem = generic_simplify (loc, code, type, op0, op1);
+ if (tem)
+ return tem;
+
/* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
First check for cases where an arithmetic operation is applied to a
return NULL_TREE;
case POINTER_PLUS_EXPR:
- /* 0 +p index -> (type)index */
- if (integer_zerop (arg0))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
-
- /* PTR +p 0 -> PTR */
- if (integer_zerop (arg1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
-
/* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
&& INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
fold_convert_loc (loc, sizetype,
arg0)));
- /* (PTR +p B) +p A -> PTR +p (B + A) */
- if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
- {
- tree inner;
- tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
- tree arg00 = TREE_OPERAND (arg0, 0);
- inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
- arg01, fold_convert_loc (loc, sizetype, arg1));
- return fold_convert_loc (loc, type,
- fold_build_pointer_plus_loc (loc,
- arg00, inner));
- }
-
- /* PTR_CST +p CST -> CST1 */
- if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
- return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
- fold_convert_loc (loc, type, arg1));
-
return NULL_TREE;
case PLUS_EXPR:
- /* A + (-B) -> A - B */
- if (TREE_CODE (arg1) == NEGATE_EXPR
- && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
- return fold_build2_loc (loc, MINUS_EXPR, type,
- fold_convert_loc (loc, type, arg0),
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg1, 0)));
- /* (-A) + B -> B - A */
- if (TREE_CODE (arg0) == NEGATE_EXPR
- && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
- && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
- return fold_build2_loc (loc, MINUS_EXPR, type,
- fold_convert_loc (loc, type, arg1),
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg0, 0)));
-
if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
{
- /* Convert ~A + 1 to -A. */
- if (TREE_CODE (arg0) == BIT_NOT_EXPR
- && integer_onep (arg1))
- return fold_build1_loc (loc, NEGATE_EXPR, type,
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg0, 0)));
-
- /* ~X + X is -1. */
- if (TREE_CODE (arg0) == BIT_NOT_EXPR
- && !TYPE_OVERFLOW_TRAPS (type))
- {
- tree tem = TREE_OPERAND (arg0, 0);
-
- STRIP_NOPS (tem);
- if (operand_equal_p (tem, arg1, 0))
- {
- t1 = build_all_ones_cst (type);
- return omit_one_operand_loc (loc, type, t1, arg1);
- }
- }
-
- /* X + ~X is -1. */
- if (TREE_CODE (arg1) == BIT_NOT_EXPR
- && !TYPE_OVERFLOW_TRAPS (type))
- {
- tree tem = TREE_OPERAND (arg1, 0);
-
- STRIP_NOPS (tem);
- if (operand_equal_p (arg0, tem, 0))
- {
- t1 = build_all_ones_cst (type);
- return omit_one_operand_loc (loc, type, t1, arg0);
- }
- }
-
/* X + (X / CST) * -CST is X % CST. */
if (TREE_CODE (arg1) == MULT_EXPR
&& TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
if (! FLOAT_TYPE_P (type))
{
- if (integer_zerop (arg1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
-
- /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
- with a constant, and the two constants have no bits in common,
- we should treat this as a BIT_IOR_EXPR since this may produce more
- simplifications. */
- if (TREE_CODE (arg0) == BIT_AND_EXPR
- && TREE_CODE (arg1) == BIT_AND_EXPR
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
- && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
- && wi::bit_and (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1)) == 0)
- {
- code = BIT_IOR_EXPR;
- goto bit_ior;
- }
-
/* Reassociate (plus (plus (mult) (foo)) (mult)) as
(plus (plus (mult) (mult)) (foo)) so that we can
take advantage of the factoring cases below. */
- if (TYPE_OVERFLOW_WRAPS (type)
+ if (ANY_INTEGRAL_TYPE_P (type)
+ && TYPE_OVERFLOW_WRAPS (type)
&& (((TREE_CODE (arg0) == PLUS_EXPR
|| TREE_CODE (arg0) == MINUS_EXPR)
&& TREE_CODE (arg1) == MULT_EXPR)
}
else
{
- /* See if ARG1 is zero and X + ARG1 reduces to X. */
- if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
-
- /* Likewise if the operands are reversed. */
- if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
-
- /* Convert X + -C into X - C. */
- if (TREE_CODE (arg1) == REAL_CST
- && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
- {
- tem = fold_negate_const (arg1, type);
- if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
- return fold_build2_loc (loc, MINUS_EXPR, type,
- fold_convert_loc (loc, type, arg0),
- fold_convert_loc (loc, type, tem));
- }
-
/* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
to __complex__ ( x, y ). This is not the same for SNaNs or
if signed zeros are involved. */
- if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
- && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
+ if (!HONOR_SNANS (element_mode (arg0))
+ && !HONOR_SIGNED_ZEROS (element_mode (arg0))
&& COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
{
tree rtype = TREE_TYPE (TREE_TYPE (arg0));
&& (tem = distribute_real_division (loc, code, type, arg0, arg1)))
return tem;
- /* Convert x+x into x*2.0. */
- if (operand_equal_p (arg0, arg1, 0)
- && SCALAR_FLOAT_TYPE_P (type))
- return fold_build2_loc (loc, MULT_EXPR, type, arg0,
- build_real (type, dconst2));
-
/* Convert a + (b*c + d*e) into (a + b*c) + d*e.
We associate floats only if the user has specified
-fassociative-math. */
tem = build2_loc (loc, LROTATE_EXPR,
TREE_TYPE (TREE_OPERAND (arg0, 0)),
TREE_OPERAND (arg0, 0),
- code0 == LSHIFT_EXPR ? tree01 : tree11);
+ code0 == LSHIFT_EXPR
+ ? TREE_OPERAND (arg0, 1)
+ : TREE_OPERAND (arg1, 1));
return fold_convert_loc (loc, type, tem);
}
else if (code11 == MINUS_EXPR)
? LROTATE_EXPR
: RROTATE_EXPR),
TREE_TYPE (TREE_OPERAND (arg0, 0)),
- TREE_OPERAND (arg0, 0), tree01));
+ TREE_OPERAND (arg0, 0),
+ TREE_OPERAND (arg0, 1)));
}
else if (code01 == MINUS_EXPR)
{
? LROTATE_EXPR
: RROTATE_EXPR),
TREE_TYPE (TREE_OPERAND (arg0, 0)),
- TREE_OPERAND (arg0, 0), tree11));
+ TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
}
}
}
/* Don't introduce overflows through reassociation. */
if (!any_overflows
- && ((lit0 && TREE_OVERFLOW (lit0))
- || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
+ && ((lit0 && TREE_OVERFLOW_P (lit0))
+ || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
return NULL_TREE;
if (minus_lit0)
TREE_OPERAND (arg1, 0));
tree arg11 = fold_convert_loc (loc, type,
TREE_OPERAND (arg1, 1));
- tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg0,
- fold_convert_loc (loc, type, arg10));
+ tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
+ fold_convert_loc (loc, type, arg0),
+ arg10);
if (tmp)
return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
}
}
- /* A - (-B) -> A + B */
- if (TREE_CODE (arg1) == NEGATE_EXPR)
- return fold_build2_loc (loc, PLUS_EXPR, type, op0,
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg1, 0)));
/* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
if (TREE_CODE (arg0) == NEGATE_EXPR
&& negate_expr_p (arg1)
negate_expr (arg1)),
fold_convert_loc (loc, type,
TREE_OPERAND (arg0, 0)));
- /* Convert -A - 1 to ~A. */
- if (TREE_CODE (type) != COMPLEX_TYPE
- && TREE_CODE (arg0) == NEGATE_EXPR
- && integer_onep (arg1)
- && !TYPE_OVERFLOW_TRAPS (type))
- return fold_build1_loc (loc, BIT_NOT_EXPR, type,
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg0, 0)));
-
- /* Convert -1 - A to ~A. */
- if (TREE_CODE (type) != COMPLEX_TYPE
- && integer_all_onesp (arg0))
- return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
-
-
- /* X - (X / Y) * Y is X % Y. */
- if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
- && TREE_CODE (arg1) == MULT_EXPR
- && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
- && operand_equal_p (arg0,
- TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
- && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
- TREE_OPERAND (arg1, 1), 0))
- return
- fold_convert_loc (loc, type,
- fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
- arg0, TREE_OPERAND (arg1, 1)));
if (! FLOAT_TYPE_P (type))
{
- if (integer_zerop (arg0))
- return negate_expr (fold_convert_loc (loc, type, arg1));
- if (integer_zerop (arg1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
-
- /* Fold A - (A & B) into ~B & A. */
- if (!TREE_SIDE_EFFECTS (arg0)
- && TREE_CODE (arg1) == BIT_AND_EXPR)
- {
- if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
- {
- tree arg10 = fold_convert_loc (loc, type,
- TREE_OPERAND (arg1, 0));
- return fold_build2_loc (loc, BIT_AND_EXPR, type,
- fold_build1_loc (loc, BIT_NOT_EXPR,
- type, arg10),
- fold_convert_loc (loc, type, arg0));
- }
- if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
- {
- tree arg11 = fold_convert_loc (loc,
- type, TREE_OPERAND (arg1, 1));
- return fold_build2_loc (loc, BIT_AND_EXPR, type,
- fold_build1_loc (loc, BIT_NOT_EXPR,
- type, arg11),
- fold_convert_loc (loc, type, arg0));
- }
- }
-
/* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
any power of 2 minus 1. */
if (TREE_CODE (arg0) == BIT_AND_EXPR
}
}
- /* See if ARG1 is zero and X - ARG1 reduces to X. */
- else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
-
- /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
- ARG0 is zero and X + ARG0 reduces to X, since that would mean
- (-ARG1 + ARG0) reduces to -ARG1. */
- else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
- return negate_expr (fold_convert_loc (loc, type, arg1));
-
/* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
__complex__ ( x, -y ). This is not the same for SNaNs or if
signed zeros are involved. */
- if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
- && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
+ if (!HONOR_SNANS (element_mode (arg0))
+ && !HONOR_SIGNED_ZEROS (element_mode (arg0))
&& COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
{
tree rtype = TREE_TYPE (TREE_TYPE (arg0));
}
}
- /* Fold &x - &x. This can happen from &x.foo - &x.
- This is unsafe for certain floats even in non-IEEE formats.
- In IEEE, it is unsafe because it does wrong for NaNs.
- Also note that operand_equal_p is always false if an operand
- is volatile. */
-
- if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
- && operand_equal_p (arg0, arg1, 0))
- return build_zero_cst (type);
-
/* A - B -> A + (-B) if B is easily negatable. */
if (negate_expr_p (arg1)
+ && !TYPE_OVERFLOW_SANITIZED (type)
&& ((FLOAT_TYPE_P (type)
/* Avoid this transformation if B is a positive REAL_CST. */
&& (TREE_CODE (arg1) != REAL_CST
fold_convert_loc (loc, type,
negate_expr (arg1)));
- /* Try folding difference of addresses. */
- {
- HOST_WIDE_INT diff;
-
- if ((TREE_CODE (arg0) == ADDR_EXPR
- || TREE_CODE (arg1) == ADDR_EXPR)
- && ptr_difference_const (arg0, arg1, &diff))
- return build_int_cst_type (type, diff);
- }
-
/* Fold &a[i] - &a[j] to i-j. */
if (TREE_CODE (arg0) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
if (! FLOAT_TYPE_P (type))
{
- if (integer_zerop (arg1))
- return omit_one_operand_loc (loc, type, arg1, arg0);
- if (integer_onep (arg1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
- /* Transform x * -1 into -x. Make sure to do the negation
- on the original operand with conversions not stripped
- because we can only strip non-sign-changing conversions. */
- if (integer_minus_onep (arg1))
- return fold_convert_loc (loc, type, negate_expr (op0));
/* Transform x * -C into -x * C if x is easily negatable. */
if (TREE_CODE (arg1) == INTEGER_CST
&& tree_int_cst_sgn (arg1) == -1
}
else
{
- /* Maybe fold x * 0 to 0. The expressions aren't the same
- when x is NaN, since x * 0 is also NaN. Nor are they the
- same in modes with signed zeros, since multiplying a
- negative value by 0 gives -0, not +0. */
- if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
- && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
- && real_zerop (arg1))
- return omit_one_operand_loc (loc, type, arg1, arg0);
- /* In IEEE floating point, x*1 is not equivalent to x for snans.
- Likewise for complex arithmetic with signed zeros. */
- if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
- && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
- || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
- && real_onep (arg1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
-
- /* Transform x * -1.0 into -x. */
- if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
- && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
- || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
- && real_minus_onep (arg1))
- return fold_convert_loc (loc, type, negate_expr (arg0));
-
/* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
the result for floating point types due to rounding so it is applied
only if -fassociative-math was specify. */
/* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
This is not the same for NaNs or if signed zeros are
involved. */
- if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
- && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
+ if (!HONOR_NANS (arg0)
+ && !HONOR_SIGNED_ZEROS (element_mode (arg0))
&& COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
&& TREE_CODE (arg1) == COMPLEX_CST
&& real_zerop (TREE_REALPART (arg1)))
/* Optimize sqrt(x)*sqrt(x) as x. */
if (BUILTIN_SQRT_P (fcode0)
&& operand_equal_p (arg00, arg10, 0)
- && ! HONOR_SNANS (TYPE_MODE (type)))
+ && ! HONOR_SNANS (element_mode (type)))
return arg00;
/* Optimize root(x)*root(y) as root(x*y). */
goto associate;
case BIT_IOR_EXPR:
- bit_ior:
- if (integer_all_onesp (arg1))
- return omit_one_operand_loc (loc, type, arg1, arg0);
- if (integer_zerop (arg1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
- if (operand_equal_p (arg0, arg1, 0))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
-
- /* ~X | X is -1. */
- if (TREE_CODE (arg0) == BIT_NOT_EXPR
- && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
- {
- t1 = build_zero_cst (type);
- t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
- return omit_one_operand_loc (loc, type, t1, arg1);
- }
-
- /* X | ~X is -1. */
- if (TREE_CODE (arg1) == BIT_NOT_EXPR
- && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
- {
- t1 = build_zero_cst (type);
- t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
- return omit_one_operand_loc (loc, type, t1, arg0);
- }
-
/* Canonicalize (X & C1) | C2. */
if (TREE_CODE (arg0) == BIT_AND_EXPR
&& TREE_CODE (arg1) == INTEGER_CST
arg1);
}
- /* (X & Y) | Y is (X, Y). */
- if (TREE_CODE (arg0) == BIT_AND_EXPR
- && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
- return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
- /* (X & Y) | X is (Y, X). */
- if (TREE_CODE (arg0) == BIT_AND_EXPR
- && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
- && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
- return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
- /* X | (X & Y) is (Y, X). */
- if (TREE_CODE (arg1) == BIT_AND_EXPR
- && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
- && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
- return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
- /* X | (Y & X) is (Y, X). */
- if (TREE_CODE (arg1) == BIT_AND_EXPR
- && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
- && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
- return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
-
/* (X & ~Y) | (~X & Y) is X ^ Y */
if (TREE_CODE (arg0) == BIT_AND_EXPR
&& TREE_CODE (arg1) == BIT_AND_EXPR)
return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
}
- t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
- if (t1 != NULL_TREE)
- return t1;
-
- /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
-
- This results in more efficient code for machines without a NAND
- instruction. Combine will canonicalize to the first form
- which will allow use of NAND instructions provided by the
- backend if they exist. */
- if (TREE_CODE (arg0) == BIT_NOT_EXPR
- && TREE_CODE (arg1) == BIT_NOT_EXPR)
- {
- return
- fold_build1_loc (loc, BIT_NOT_EXPR, type,
- build2 (BIT_AND_EXPR, type,
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg0, 0)),
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg1, 0))));
- }
-
/* See if this can be simplified into a rotate first. If that
is unsuccessful continue in the association code. */
goto bit_rotate;
case BIT_XOR_EXPR:
- if (integer_zerop (arg1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
- if (integer_all_onesp (arg1))
- return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
- if (operand_equal_p (arg0, arg1, 0))
- return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
-
- /* ~X ^ X is -1. */
- if (TREE_CODE (arg0) == BIT_NOT_EXPR
- && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
- {
- t1 = build_zero_cst (type);
- t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
- return omit_one_operand_loc (loc, type, t1, arg1);
- }
-
- /* X ^ ~X is -1. */
- if (TREE_CODE (arg1) == BIT_NOT_EXPR
- && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
- {
- t1 = build_zero_cst (type);
- t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
- return omit_one_operand_loc (loc, type, t1, arg0);
- }
-
- /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
- with a constant, and the two constants have no bits in common,
- we should treat this as a BIT_IOR_EXPR since this may produce more
- simplifications. */
- if (TREE_CODE (arg0) == BIT_AND_EXPR
- && TREE_CODE (arg1) == BIT_AND_EXPR
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
- && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
- && wi::bit_and (TREE_OPERAND (arg0, 1),
- TREE_OPERAND (arg1, 1)) == 0)
- {
- code = BIT_IOR_EXPR;
- goto bit_ior;
- }
-
- /* (X | Y) ^ X -> Y & ~ X*/
- if (TREE_CODE (arg0) == BIT_IOR_EXPR
- && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
- {
- tree t2 = TREE_OPERAND (arg0, 1);
- t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
- arg1);
- t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
- fold_convert_loc (loc, type, t2),
- fold_convert_loc (loc, type, t1));
- return t1;
- }
-
- /* (Y | X) ^ X -> Y & ~ X*/
- if (TREE_CODE (arg0) == BIT_IOR_EXPR
- && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
- {
- tree t2 = TREE_OPERAND (arg0, 0);
- t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
- arg1);
- t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
- fold_convert_loc (loc, type, t2),
- fold_convert_loc (loc, type, t1));
- return t1;
- }
-
- /* X ^ (X | Y) -> Y & ~ X*/
- if (TREE_CODE (arg1) == BIT_IOR_EXPR
- && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
- {
- tree t2 = TREE_OPERAND (arg1, 1);
- t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
- arg0);
- t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
- fold_convert_loc (loc, type, t2),
- fold_convert_loc (loc, type, t1));
- return t1;
- }
-
- /* X ^ (Y | X) -> Y & ~ X*/
- if (TREE_CODE (arg1) == BIT_IOR_EXPR
- && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
- {
- tree t2 = TREE_OPERAND (arg1, 0);
- t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
- arg0);
- t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
- fold_convert_loc (loc, type, t2),
- fold_convert_loc (loc, type, t1));
- return t1;
- }
-
- /* Convert ~X ^ ~Y to X ^ Y. */
- if (TREE_CODE (arg0) == BIT_NOT_EXPR
- && TREE_CODE (arg1) == BIT_NOT_EXPR)
- return fold_build2_loc (loc, code, type,
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg0, 0)),
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg1, 0)));
-
- /* Convert ~X ^ C to X ^ ~C. */
- if (TREE_CODE (arg0) == BIT_NOT_EXPR
- && TREE_CODE (arg1) == INTEGER_CST)
- return fold_build2_loc (loc, code, type,
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg0, 0)),
- fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
-
/* Fold (X & 1) ^ 1 as (X & 1) == 0. */
if (TREE_CODE (arg0) == BIT_AND_EXPR
+ && INTEGRAL_TYPE_P (type)
&& integer_onep (TREE_OPERAND (arg0, 1))
&& integer_onep (arg1))
return fold_build2_loc (loc, EQ_EXPR, type, arg0,
build_zero_cst (TREE_TYPE (arg0)));
- /* Fold (X & Y) ^ Y as ~X & Y. */
- if (TREE_CODE (arg0) == BIT_AND_EXPR
- && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
- {
- tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
- return fold_build2_loc (loc, BIT_AND_EXPR, type,
- fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
- fold_convert_loc (loc, type, arg1));
- }
- /* Fold (X & Y) ^ X as ~Y & X. */
- if (TREE_CODE (arg0) == BIT_AND_EXPR
- && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
- && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
- {
- tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
- return fold_build2_loc (loc, BIT_AND_EXPR, type,
- fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
- fold_convert_loc (loc, type, arg1));
- }
- /* Fold X ^ (X & Y) as X & ~Y. */
- if (TREE_CODE (arg1) == BIT_AND_EXPR
- && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
- {
- tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
- return fold_build2_loc (loc, BIT_AND_EXPR, type,
- fold_convert_loc (loc, type, arg0),
- fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
- }
- /* Fold X ^ (Y & X) as ~Y & X. */
- if (TREE_CODE (arg1) == BIT_AND_EXPR
- && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
- && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
- {
- tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
- return fold_build2_loc (loc, BIT_AND_EXPR, type,
- fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
- fold_convert_loc (loc, type, arg0));
- }
-
/* See if this can be simplified into a rotate first. If that
is unsuccessful continue in the association code. */
goto bit_rotate;
case BIT_AND_EXPR:
- if (integer_all_onesp (arg1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
- if (integer_zerop (arg1))
- return omit_one_operand_loc (loc, type, arg1, arg0);
- if (operand_equal_p (arg0, arg1, 0))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
-
/* ~X & X, (X == 0) & X, and !X & X are always zero. */
if ((TREE_CODE (arg0) == BIT_NOT_EXPR
|| TREE_CODE (arg0) == TRUTH_NOT_EXPR
&& operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
- /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
- if (TREE_CODE (arg0) == BIT_IOR_EXPR
- && TREE_CODE (arg1) == INTEGER_CST
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
- {
- tree tmp1 = fold_convert_loc (loc, type, arg1);
- tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
- tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
- tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
- tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
- return
- fold_convert_loc (loc, type,
- fold_build2_loc (loc, BIT_IOR_EXPR,
- type, tmp2, tmp3));
- }
-
- /* (X | Y) & Y is (X, Y). */
- if (TREE_CODE (arg0) == BIT_IOR_EXPR
- && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
- return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
- /* (X | Y) & X is (Y, X). */
- if (TREE_CODE (arg0) == BIT_IOR_EXPR
- && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
- && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
- return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
- /* X & (X | Y) is (Y, X). */
- if (TREE_CODE (arg1) == BIT_IOR_EXPR
- && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
- && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
- return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
- /* X & (Y | X) is (Y, X). */
- if (TREE_CODE (arg1) == BIT_IOR_EXPR
- && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
- && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
- return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
-
/* Fold (X ^ 1) & 1 as (X & 1) == 0. */
if (TREE_CODE (arg0) == BIT_XOR_EXPR
+ && INTEGRAL_TYPE_P (type)
&& integer_onep (TREE_OPERAND (arg0, 1))
&& integer_onep (arg1))
{
}
/* Fold ~X & 1 as (X & 1) == 0. */
if (TREE_CODE (arg0) == BIT_NOT_EXPR
+ && INTEGRAL_TYPE_P (type)
&& integer_onep (arg1))
{
tree tem2;
tem = fold_build2_loc (loc, MINUS_EXPR, utype,
pmop[0], pmop[1]);
/* TEM is now the new binary +, - or unary - replacement. */
- tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
- fold_convert_loc (loc, utype, arg1));
- return fold_convert_loc (loc, type, tem);
- }
- }
- }
-
- t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
- if (t1 != NULL_TREE)
- return t1;
- /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
- if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
- && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
- {
- prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
-
- wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
- if (mask == -1)
- return
- fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
- }
-
- /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
-
- This results in more efficient code for machines without a NOR
- instruction. Combine will canonicalize to the first form
- which will allow use of NOR instructions provided by the
- backend if they exist. */
- if (TREE_CODE (arg0) == BIT_NOT_EXPR
- && TREE_CODE (arg1) == BIT_NOT_EXPR)
- {
- return fold_build1_loc (loc, BIT_NOT_EXPR, type,
- build2 (BIT_IOR_EXPR, type,
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg0, 0)),
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg1, 0))));
- }
-
- /* If arg0 is derived from the address of an object or function, we may
- be able to fold this expression using the object or function's
- alignment. */
- if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
- {
- unsigned HOST_WIDE_INT modulus, residue;
- unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
-
- modulus = get_pointer_modulus_and_residue (arg0, &residue,
- integer_onep (arg1));
-
- /* This works because modulus is a power of 2. If this weren't the
- case, we'd have to replace it by its greatest power-of-2
- divisor: modulus & -modulus. */
- if (low < modulus)
- return build_int_cst (type, residue & low);
- }
-
- /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
- (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
- if the new mask might be further optimized. */
- if ((TREE_CODE (arg0) == LSHIFT_EXPR
- || TREE_CODE (arg0) == RSHIFT_EXPR)
- && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
- && TREE_CODE (arg1) == INTEGER_CST
- && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
- && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
- && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
- < TYPE_PRECISION (TREE_TYPE (arg0))))
- {
- unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
- unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
- unsigned HOST_WIDE_INT newmask, zerobits = 0;
- tree shift_type = TREE_TYPE (arg0);
-
- if (TREE_CODE (arg0) == LSHIFT_EXPR)
- zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
- else if (TREE_CODE (arg0) == RSHIFT_EXPR
- && TYPE_PRECISION (TREE_TYPE (arg0))
- == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
- {
- prec = TYPE_PRECISION (TREE_TYPE (arg0));
- tree arg00 = TREE_OPERAND (arg0, 0);
- /* See if more bits can be proven as zero because of
- zero extension. */
- if (TREE_CODE (arg00) == NOP_EXPR
- && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
- {
- tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
- if (TYPE_PRECISION (inner_type)
- == GET_MODE_PRECISION (TYPE_MODE (inner_type))
- && TYPE_PRECISION (inner_type) < prec)
- {
- prec = TYPE_PRECISION (inner_type);
- /* See if we can shorten the right shift. */
- if (shiftc < prec)
- shift_type = inner_type;
- /* Otherwise X >> C1 is all zeros, so we'll optimize
- it into (X, 0) later on by making sure zerobits
- is all ones. */
- }
- }
- zerobits = ~(unsigned HOST_WIDE_INT) 0;
- if (shiftc < prec)
- {
- zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
- zerobits <<= prec - shiftc;
- }
- /* For arithmetic shift if sign bit could be set, zerobits
- can contain actually sign bits, so no transformation is
- possible, unless MASK masks them all away. In that
- case the shift needs to be converted into logical shift. */
- if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
- && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
- {
- if ((mask & zerobits) == 0)
- shift_type = unsigned_type_for (TREE_TYPE (arg0));
- else
- zerobits = 0;
- }
- }
-
- /* ((X << 16) & 0xff00) is (X, 0). */
- if ((mask & zerobits) == mask)
- return omit_one_operand_loc (loc, type,
- build_int_cst (type, 0), arg0);
-
- newmask = mask | zerobits;
- if (newmask != mask && (newmask & (newmask + 1)) == 0)
- {
- /* Only do the transformation if NEWMASK is some integer
- mode's mask. */
- for (prec = BITS_PER_UNIT;
- prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
- if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
- break;
- if (prec < HOST_BITS_PER_WIDE_INT
- || newmask == ~(unsigned HOST_WIDE_INT) 0)
- {
- tree newmaskt;
-
- if (shift_type != TREE_TYPE (arg0))
- {
- tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
- fold_convert_loc (loc, shift_type,
- TREE_OPERAND (arg0, 0)),
- TREE_OPERAND (arg0, 1));
- tem = fold_convert_loc (loc, type, tem);
- }
- else
- tem = op0;
- newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
- if (!tree_int_cst_equal (newmaskt, arg1))
- return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
+ tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
+ fold_convert_loc (loc, utype, arg1));
+ return fold_convert_loc (loc, type, tem);
}
}
}
+ /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
+ if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
+ && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
+ {
+ prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
+
+ wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
+ if (mask == -1)
+ return
+ fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
+ }
+
goto associate;
case RDIV_EXPR:
&& real_zerop (arg1))
return NULL_TREE;
- /* Optimize A / A to 1.0 if we don't care about
- NaNs or Infinities. Skip the transformation
- for non-real operands. */
- if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
- && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
- && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
- && operand_equal_p (arg0, arg1, 0))
- {
- tree r = build_real (TREE_TYPE (arg0), dconst1);
-
- return omit_two_operands_loc (loc, type, r, arg0, arg1);
- }
-
- /* The complex version of the above A / A optimization. */
- if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
- && operand_equal_p (arg0, arg1, 0))
- {
- tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
- if (! HONOR_NANS (TYPE_MODE (elem_type))
- && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
- {
- tree r = build_real (elem_type, dconst1);
- /* omit_two_operands will call fold_convert for us. */
- return omit_two_operands_loc (loc, type, r, arg0, arg1);
- }
- }
-
/* (-A) / (-B) -> A / B */
if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
return fold_build2_loc (loc, RDIV_EXPR, type,
negate_expr (arg0),
TREE_OPERAND (arg1, 0));
- /* In IEEE floating point, x/1 is not equivalent to x for snans. */
- if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
- && real_onep (arg1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
-
- /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
- if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
- && real_minus_onep (arg1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type,
- negate_expr (arg0)));
-
- /* If ARG1 is a constant, we can convert this to a multiply by the
- reciprocal. This does not have the same rounding properties,
- so only do this if -freciprocal-math. We can actually
- always safely do it if ARG1 is a power of two, but it's hard to
- tell if it is or not in a portable manner. */
- if (optimize
- && (TREE_CODE (arg1) == REAL_CST
- || (TREE_CODE (arg1) == COMPLEX_CST
- && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
- || (TREE_CODE (arg1) == VECTOR_CST
- && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
- {
- if (flag_reciprocal_math
- && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
- return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
- /* Find the reciprocal if optimizing and the result is exact.
- TODO: Complex reciprocal not implemented. */
- if (TREE_CODE (arg1) != COMPLEX_CST)
- {
- tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
-
- if (inverse)
- return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
- }
- }
/* Convert A/B/C to A/(B*C). */
if (flag_reciprocal_math
&& TREE_CODE (arg0) == RDIV_EXPR)
tree arg00 = CALL_EXPR_ARG (arg0, 0);
tree arg01 = CALL_EXPR_ARG (arg1, 0);
- if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
- && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
+ if (! HONOR_NANS (arg00)
+ && ! HONOR_INFINITIES (element_mode (arg00))
&& operand_equal_p (arg00, arg01, 0))
{
tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
tree arg00 = CALL_EXPR_ARG (arg0, 0);
tree arg01 = CALL_EXPR_ARG (arg1, 0);
- if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
- && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
+ if (! HONOR_NANS (arg00)
+ && ! HONOR_INFINITIES (element_mode (arg00))
&& operand_equal_p (arg00, arg01, 0))
{
tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
}
}
- /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
- TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
- if (INTEGRAL_TYPE_P (type)
- && TYPE_UNSIGNED (type)
- && code == FLOOR_DIV_EXPR)
- return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
-
/* Fall through */
case ROUND_DIV_EXPR:
case CEIL_DIV_EXPR:
case EXACT_DIV_EXPR:
- if (integer_onep (arg1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
if (integer_zerop (arg1))
return NULL_TREE;
- /* X / -1 is -X. */
- if (!TYPE_UNSIGNED (type)
- && TREE_CODE (arg1) == INTEGER_CST
- && wi::eq_p (arg1, -1))
- return fold_convert_loc (loc, type, negate_expr (arg0));
/* Convert -A / -B to A / B when the type is signed and overflow is
undefined. */
case FLOOR_MOD_EXPR:
case ROUND_MOD_EXPR:
case TRUNC_MOD_EXPR:
- /* X % 1 is always zero, but be sure to preserve any side
- effects in X. */
- if (integer_onep (arg1))
- return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
-
- /* X % 0, return X % 0 unchanged so that we can get the
- proper warnings and errors. */
- if (integer_zerop (arg1))
- return NULL_TREE;
-
- /* 0 % X is always zero, but be sure to preserve any side
- effects in X. Place this after checking for X == 0. */
- if (integer_zerop (arg0))
- return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
-
- /* X % -1 is zero. */
- if (!TYPE_UNSIGNED (type)
- && TREE_CODE (arg1) == INTEGER_CST
- && wi::eq_p (arg1, -1))
- return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
-
- /* X % -C is the same as X % C. */
- if (code == TRUNC_MOD_EXPR
- && TYPE_SIGN (type) == SIGNED
- && TREE_CODE (arg1) == INTEGER_CST
- && !TREE_OVERFLOW (arg1)
- && wi::neg_p (arg1)
- && !TYPE_OVERFLOW_TRAPS (type)
- /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
- && !sign_bit_p (arg1, arg1))
- return fold_build2_loc (loc, code, type,
- fold_convert_loc (loc, type, arg0),
- fold_convert_loc (loc, type,
- negate_expr (arg1)));
-
- /* X % -Y is the same as X % Y. */
- if (code == TRUNC_MOD_EXPR
- && !TYPE_UNSIGNED (type)
- && TREE_CODE (arg1) == NEGATE_EXPR
- && !TYPE_OVERFLOW_TRAPS (type))
- return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg1, 0)));
-
strict_overflow_p = false;
if (TREE_CODE (arg1) == INTEGER_CST
&& 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
return fold_convert_loc (loc, type, tem);
}
- /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
- i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
- if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
- && (TYPE_UNSIGNED (type)
- || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
- {
- tree c = arg1;
- /* Also optimize A % (C << N) where C is a power of 2,
- to A & ((C << N) - 1). */
- if (TREE_CODE (arg1) == LSHIFT_EXPR)
- c = TREE_OPERAND (arg1, 0);
-
- if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
- {
- tree mask
- = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
- build_int_cst (TREE_TYPE (arg1), 1));
- if (strict_overflow_p)
- fold_overflow_warning (("assuming signed overflow does not "
- "occur when simplifying "
- "X % (power of two)"),
- WARN_STRICT_OVERFLOW_MISC);
- return fold_build2_loc (loc, BIT_AND_EXPR, type,
- fold_convert_loc (loc, type, arg0),
- fold_convert_loc (loc, type, mask));
- }
- }
-
return NULL_TREE;
case LROTATE_EXPR:
case RROTATE_EXPR:
- if (integer_all_onesp (arg0))
- return omit_one_operand_loc (loc, type, arg0, arg1);
- goto shift;
-
case RSHIFT_EXPR:
- /* Optimize -1 >> x for arithmetic right shifts. */
- if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
- && tree_expr_nonnegative_p (arg1))
- return omit_one_operand_loc (loc, type, arg0, arg1);
- /* ... fall through ... */
-
case LSHIFT_EXPR:
- shift:
- if (integer_zerop (arg1))
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
- if (integer_zerop (arg0))
- return omit_one_operand_loc (loc, type, arg0, arg1);
-
- /* Prefer vector1 << scalar to vector1 << vector2
- if vector2 is uniform. */
- if (VECTOR_TYPE_P (TREE_TYPE (arg1))
- && (tem = uniform_vector_p (arg1)) != NULL_TREE)
- return fold_build2_loc (loc, code, type, op0, tem);
-
/* Since negative shift count is not well-defined,
don't try to compute it in the compiler. */
if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
prec = element_precision (type);
- /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
- if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
- && tree_to_uhwi (arg1) < prec
- && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
- && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
- {
- unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
- + tree_to_uhwi (arg1));
-
- /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
- being well defined. */
- if (low >= prec)
- {
- if (code == LROTATE_EXPR || code == RROTATE_EXPR)
- low = low % prec;
- else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
- return omit_one_operand_loc (loc, type, build_zero_cst (type),
- TREE_OPERAND (arg0, 0));
- else
- low = prec - 1;
- }
-
- return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
- build_int_cst (TREE_TYPE (arg1), low));
- }
-
/* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
into x & ((unsigned)-1 >> c) for unsigned types. */
if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
}
}
- /* Rewrite an LROTATE_EXPR by a constant into an
- RROTATE_EXPR by a new constant. */
- if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
- {
- tree tem = build_int_cst (TREE_TYPE (arg1), prec);
- tem = const_binop (MINUS_EXPR, tem, arg1);
- return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
- }
-
/* If we have a rotate of a bit operation with the rotate count and
the second operand of the bit operation both constant,
permute the two operations. */
prec) == 0)
return TREE_OPERAND (arg0, 0);
- /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
- (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
- if the latter can be further optimized. */
- if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
- && TREE_CODE (arg0) == BIT_AND_EXPR
- && TREE_CODE (arg1) == INTEGER_CST
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
- {
- tree mask = fold_build2_loc (loc, code, type,
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg0, 1)),
- arg1);
- tree shift = fold_build2_loc (loc, code, type,
- fold_convert_loc (loc, type,
- TREE_OPERAND (arg0, 0)),
- arg1);
- tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
- if (tem)
- return tem;
- }
-
return NULL_TREE;
case MIN_EXPR:
- if (operand_equal_p (arg0, arg1, 0))
- return omit_one_operand_loc (loc, type, arg0, arg1);
- if (INTEGRAL_TYPE_P (type)
- && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
- return omit_one_operand_loc (loc, type, arg1, arg0);
tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
if (tem)
return tem;
goto associate;
case MAX_EXPR:
- if (operand_equal_p (arg0, arg1, 0))
- return omit_one_operand_loc (loc, type, arg0, arg1);
- if (INTEGRAL_TYPE_P (type)
- && TYPE_MAX_VALUE (type)
- && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
- return omit_one_operand_loc (loc, type, arg1, arg0);
tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
if (tem)
return tem;
if (tem != NULL_TREE)
return tem;
- /* bool_var != 0 becomes bool_var. */
- if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
- && code == NE_EXPR)
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
-
- /* bool_var == 1 becomes bool_var. */
- if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
- && code == EQ_EXPR)
- return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
-
/* bool_var != 1 becomes !bool_var. */
if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
&& code == NE_EXPR)
&& code == NE_EXPR)
return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
- /* If this is an equality comparison of the address of two non-weak,
- unaliased symbols neither of which are extern (since we do not
- have access to attributes for externs), then we know the result. */
- if (TREE_CODE (arg0) == ADDR_EXPR
- && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
- && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
- && ! lookup_attribute ("alias",
- DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
- && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
- && TREE_CODE (arg1) == ADDR_EXPR
- && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
- && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
- && ! lookup_attribute ("alias",
- DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
- && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
- {
- /* We know that we're looking at the address of two
- non-weak, unaliased, static _DECL nodes.
-
- It is both wasteful and incorrect to call operand_equal_p
- to compare the two ADDR_EXPR nodes. It is wasteful in that
- all we need to do is test pointer equality for the arguments
- to the two ADDR_EXPR nodes. It is incorrect to use
- operand_equal_p as that function is NOT equivalent to a
- C equality test. It can in fact return false for two
- objects which would test as equal using the C equality
- operator. */
- bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
- return constant_boolean_node (equal
- ? code == EQ_EXPR : code != EQ_EXPR,
- type);
- }
-
- /* Similarly for a NEGATE_EXPR. */
- if (TREE_CODE (arg0) == NEGATE_EXPR
- && TREE_CODE (arg1) == INTEGER_CST
- && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
- arg1)))
- && TREE_CODE (tem) == INTEGER_CST
- && !TREE_OVERFLOW (tem))
- return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
-
/* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
if (TREE_CODE (arg0) == BIT_XOR_EXPR
&& TREE_CODE (arg1) == INTEGER_CST
return omit_one_operand_loc (loc, type, rslt, arg0);
}
- /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
- Similarly for NE_EXPR. */
- if (TREE_CODE (arg0) == BIT_IOR_EXPR
- && TREE_CODE (arg1) == INTEGER_CST
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
- {
- tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
- tree candnotd
- = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
- TREE_OPERAND (arg0, 1),
- fold_convert_loc (loc, TREE_TYPE (arg0), notd));
- tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
- if (integer_nonzerop (candnotd))
- return omit_one_operand_loc (loc, type, rslt, arg0);
- }
-
/* If this is a comparison of a field, we may be able to simplify it. */
if ((TREE_CODE (arg0) == COMPONENT_REF
|| TREE_CODE (arg0) == BIT_FIELD_REF)
tree arg00 = TREE_OPERAND (arg0, 0);
tree arg01 = TREE_OPERAND (arg0, 1);
tree itype = TREE_TYPE (arg00);
- if (wi::eq_p (arg01, TYPE_PRECISION (itype) - 1))
+ if (wi::eq_p (arg01, element_precision (itype) - 1))
{
if (TYPE_UNSIGNED (itype))
{
}
}
- /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
- if (integer_zerop (arg1)
- && TREE_CODE (arg0) == BIT_XOR_EXPR)
- return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg0, 1));
-
- /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
- if (TREE_CODE (arg0) == BIT_XOR_EXPR
- && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
- return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
- build_zero_cst (TREE_TYPE (arg0)));
- /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
- if (TREE_CODE (arg0) == BIT_XOR_EXPR
- && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
- && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
- return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
- build_zero_cst (TREE_TYPE (arg0)));
-
- /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
- if (TREE_CODE (arg0) == BIT_XOR_EXPR
- && TREE_CODE (arg1) == INTEGER_CST
- && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
- return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
- fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
- TREE_OPERAND (arg0, 1), arg1));
-
/* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
(X & C) == 0 when C is a single bit. */
if (TREE_CODE (arg0) == BIT_AND_EXPR
return omit_one_operand_loc (loc, type, res, arg0);
}
- /* Fold -X op -Y as X op Y, where op is eq/ne. */
- if (TREE_CODE (arg0) == NEGATE_EXPR
- && TREE_CODE (arg1) == NEGATE_EXPR)
- return fold_build2_loc (loc, code, type,
- TREE_OPERAND (arg0, 0),
- fold_convert_loc (loc, TREE_TYPE (arg0),
- TREE_OPERAND (arg1, 0)));
-
/* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
if (TREE_CODE (arg0) == BIT_AND_EXPR
&& TREE_CODE (arg1) == BIT_AND_EXPR)
if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
&& operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
&& ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
- && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
+ && !HONOR_SNANS (arg0))
|| (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
&& TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
{
}
/* Convert (X - c) <= X to true. */
- if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
+ if (!HONOR_NANS (arg1)
&& code == LE_EXPR
&& ((code0 == MINUS_EXPR && is_positive >= 0)
|| (code0 == PLUS_EXPR && is_positive <= 0)))
}
/* Convert (X + c) >= X to true. */
- if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
+ if (!HONOR_NANS (arg1)
&& code == GE_EXPR
&& ((code0 == PLUS_EXPR && is_positive >= 0)
|| (code0 == MINUS_EXPR && is_positive <= 0)))
strict_overflow_p = false;
if (code == GE_EXPR
&& (integer_zerop (arg1)
- || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
+ || (! HONOR_NANS (arg0)
&& real_zerop (arg1)))
&& tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
{
&& TYPE_UNSIGNED (TREE_TYPE (arg0))
&& CONVERT_EXPR_P (arg1)
&& TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
- && (TYPE_PRECISION (TREE_TYPE (arg1))
- >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
+ && (element_precision (TREE_TYPE (arg1))
+ >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
&& (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
- || (TYPE_PRECISION (TREE_TYPE (arg1))
- == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
+ || (element_precision (TREE_TYPE (arg1))
+ == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
&& integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
{
tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
: fold_convert_loc (loc, type, arg1);
return pedantic_non_lvalue_loc (loc, tem);
- case COMPLEX_EXPR:
- if ((TREE_CODE (arg0) == REAL_CST
- && TREE_CODE (arg1) == REAL_CST)
- || (TREE_CODE (arg0) == INTEGER_CST
- && TREE_CODE (arg1) == INTEGER_CST))
- return build_complex (type, arg0, arg1);
- if (TREE_CODE (arg0) == REALPART_EXPR
- && TREE_CODE (arg1) == IMAGPART_EXPR
- && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
- && operand_equal_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0), 0))
- return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg1, 0));
- return NULL_TREE;
-
case ASSERT_EXPR:
/* An ASSERT_EXPR should never be passed to fold_binary. */
gcc_unreachable ();
- case VEC_PACK_TRUNC_EXPR:
- case VEC_PACK_FIX_TRUNC_EXPR:
- {
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
- tree *elts;
-
- gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
- && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
- if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
- return NULL_TREE;
-
- elts = XALLOCAVEC (tree, nelts);
- if (!vec_cst_ctor_to_array (arg0, elts)
- || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
- return NULL_TREE;
-
- for (i = 0; i < nelts; i++)
- {
- elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
- ? NOP_EXPR : FIX_TRUNC_EXPR,
- TREE_TYPE (type), elts[i]);
- if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
- return NULL_TREE;
- }
-
- return build_vector (type, elts);
- }
-
- case VEC_WIDEN_MULT_LO_EXPR:
- case VEC_WIDEN_MULT_HI_EXPR:
- case VEC_WIDEN_MULT_EVEN_EXPR:
- case VEC_WIDEN_MULT_ODD_EXPR:
- {
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
- unsigned int out, ofs, scale;
- tree *elts;
-
- gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
- && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
- if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
- return NULL_TREE;
-
- elts = XALLOCAVEC (tree, nelts * 4);
- if (!vec_cst_ctor_to_array (arg0, elts)
- || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
- return NULL_TREE;
-
- if (code == VEC_WIDEN_MULT_LO_EXPR)
- scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
- else if (code == VEC_WIDEN_MULT_HI_EXPR)
- scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
- else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
- scale = 1, ofs = 0;
- else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
- scale = 1, ofs = 1;
-
- for (out = 0; out < nelts; out++)
- {
- unsigned int in1 = (out << scale) + ofs;
- unsigned int in2 = in1 + nelts * 2;
- tree t1, t2;
-
- t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
- t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
-
- if (t1 == NULL_TREE || t2 == NULL_TREE)
- return NULL_TREE;
- elts[out] = const_binop (MULT_EXPR, t1, t2);
- if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
- return NULL_TREE;
- }
-
- return build_vector (type, elts);
- }
-
default:
return NULL_TREE;
} /* switch (code) */
gcc_assert (IS_EXPR_CODE_CLASS (kind)
&& TREE_CODE_LENGTH (code) == 3);
+ /* If this is a commutative operation, and OP0 is a constant, move it
+ to OP1 to reduce the number of tests below. */
+ if (commutative_ternary_tree_code (code)
+ && tree_swap_operands_p (op0, op1, true))
+ return fold_build3_loc (loc, code, type, op1, op0, op2);
+
+ tem = generic_simplify (loc, code, type, op0, op1, op2);
+ if (tem)
+ return tem;
+
/* Strip any conversions that don't change the mode. This is safe
for every expression, except for a comparison expression because
its signedness is derived from its operands. So, in the latter
}
else if (TREE_CODE (arg0) == VECTOR_CST)
{
- if (integer_all_onesp (arg0))
- return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
- if (integer_zerop (arg0))
- return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
-
if ((TREE_CODE (arg1) == VECTOR_CST
|| TREE_CODE (arg1) == CONSTRUCTOR)
&& (TREE_CODE (arg2) == VECTOR_CST
}
}
- if (operand_equal_p (arg1, op2, 0))
- return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
-
/* If we have A op B ? A : C, we may be able to convert this to a
simpler expression, depending on the operation and the values
of B and C. Signed zeros prevent all of these transformations,
if (COMPARISON_CLASS_P (arg0)
&& operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
arg1, TREE_OPERAND (arg0, 1))
- && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
+ && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
{
tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
if (tem)
&& operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
op2,
TREE_OPERAND (arg0, 1))
- && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
+ && !HONOR_SIGNED_ZEROS (element_mode (op2)))
{
location_t loc0 = expr_location_or (arg0, loc);
tem = fold_invert_truthvalue (loc0, arg0);
case VEC_PERM_EXPR:
if (TREE_CODE (arg2) == VECTOR_CST)
{
- unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
- unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
+ unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
+ unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
+ unsigned char *sel2 = sel + nelts;
bool need_mask_canon = false;
+ bool need_mask_canon2 = false;
bool all_in_vec0 = true;
bool all_in_vec1 = true;
bool maybe_identity = true;
bool single_arg = (op0 == op1);
bool changed = false;
- mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
+ mask2 = 2 * nelts - 1;
+ mask = single_arg ? (nelts - 1) : mask2;
gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
for (i = 0; i < nelts; i++)
{
/* Make sure that the perm value is in an acceptable
range. */
wide_int t = val;
- if (wi::gtu_p (t, mask))
- {
- need_mask_canon = true;
- sel[i] = t.to_uhwi () & mask;
- }
- else
- sel[i] = t.to_uhwi ();
+ need_mask_canon |= wi::gtu_p (t, mask);
+ need_mask_canon2 |= wi::gtu_p (t, mask2);
+ sel[i] = t.to_uhwi () & mask;
+ sel2[i] = t.to_uhwi () & mask2;
if (sel[i] < nelts)
all_in_vec1 = false;
if (op0 == op1 && !single_arg)
changed = true;
+ /* Some targets are deficient and fail to expand a single
+ argument permutation while still allowing an equivalent
+ 2-argument version. */
+ if (need_mask_canon && arg2 == op2
+ && !can_vec_perm_p (TYPE_MODE (type), false, sel)
+ && can_vec_perm_p (TYPE_MODE (type), false, sel2))
+ {
+ need_mask_canon = need_mask_canon2;
+ sel = sel2;
+ }
+
if (need_mask_canon && arg2 == op2)
{
tree *tsel = XALLOCAVEC (tree, nelts);
#undef fold
static void fold_checksum_tree (const_tree, struct md5_ctx *,
- hash_table<pointer_hash<const tree_node> > *);
+ hash_table<nofree_ptr_hash<const tree_node> > *);
static void fold_check_failed (const_tree, const_tree);
void print_fold_checksum (const_tree);
tree ret;
struct md5_ctx ctx;
unsigned char checksum_before[16], checksum_after[16];
- hash_table<pointer_hash<const tree_node> > ht (32);
+ hash_table<nofree_ptr_hash<const tree_node> > ht (32);
md5_init_ctx (&ctx);
fold_checksum_tree (expr, &ctx, &ht);
{
struct md5_ctx ctx;
unsigned char checksum[16], cnt;
- hash_table<pointer_hash<const tree_node> > ht (32);
+ hash_table<nofree_ptr_hash<const tree_node> > ht (32);
md5_init_ctx (&ctx);
fold_checksum_tree (expr, &ctx, &ht);
static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
- hash_table<pointer_hash <const tree_node> > *ht)
+ hash_table<nofree_ptr_hash <const tree_node> > *ht)
{
const tree_node **slot;
enum tree_code code;
*slot = expr;
code = TREE_CODE (expr);
if (TREE_CODE_CLASS (code) == tcc_declaration
- && DECL_ASSEMBLER_NAME_SET_P (expr))
+ && HAS_DECL_ASSEMBLER_NAME_P (expr))
{
- /* Allow DECL_ASSEMBLER_NAME to be modified. */
+ /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
memcpy ((char *) &buf, expr, tree_size (expr));
SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
+ buf.decl_with_vis.symtab_node = NULL;
expr = (tree) &buf;
}
else if (TREE_CODE_CLASS (code) == tcc_type
int i;
unsigned char checksum[16];
struct md5_ctx ctx;
- hash_table<pointer_hash<const tree_node> > ht (32);
+ hash_table<nofree_ptr_hash<const tree_node> > ht (32);
md5_init_ctx (&ctx);
fold_checksum_tree (t, &ctx, &ht);
#ifdef ENABLE_FOLD_CHECKING
unsigned char checksum_before[16], checksum_after[16];
struct md5_ctx ctx;
- hash_table<pointer_hash<const tree_node> > ht (32);
+ hash_table<nofree_ptr_hash<const tree_node> > ht (32);
md5_init_ctx (&ctx);
fold_checksum_tree (op0, &ctx, &ht);
checksum_after_op0[16],
checksum_after_op1[16];
struct md5_ctx ctx;
- hash_table<pointer_hash<const tree_node> > ht (32);
+ hash_table<nofree_ptr_hash<const tree_node> > ht (32);
md5_init_ctx (&ctx);
fold_checksum_tree (op0, &ctx, &ht);
checksum_after_op1[16],
checksum_after_op2[16];
struct md5_ctx ctx;
- hash_table<pointer_hash<const tree_node> > ht (32);
+ hash_table<nofree_ptr_hash<const tree_node> > ht (32);
md5_init_ctx (&ctx);
fold_checksum_tree (op0, &ctx, &ht);
checksum_after_fn[16],
checksum_after_arglist[16];
struct md5_ctx ctx;
- hash_table<pointer_hash<const tree_node> > ht (32);
+ hash_table<nofree_ptr_hash<const tree_node> > ht (32);
int i;
md5_init_ctx (&ctx);
#endif
tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
+ if (!tem)
+ tem = build_call_array_loc (loc, type, fn, nargs, argarray);
#ifdef ENABLE_FOLD_CHECKING
md5_init_ctx (&ctx);
case ABS_EXPR:
/* We can't return 1 if flag_wrapv is set because
ABS_EXPR<INT_MIN> = INT_MIN. */
- if (!INTEGRAL_TYPE_P (type))
+ if (!ANY_INTEGRAL_TYPE_P (type))
return true;
if (TYPE_OVERFLOW_UNDEFINED (type))
{
return tree_expr_nonnegative_warnv_p (op0,
strict_overflow_p);
- case NOP_EXPR:
+ CASE_CONVERT:
{
tree inner_type = TREE_TYPE (op0);
tree outer_type = type;
|| (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
&& tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
{
- if (TYPE_OVERFLOW_UNDEFINED (type))
+ if (ANY_INTEGRAL_TYPE_P (type)
+ && TYPE_OVERFLOW_UNDEFINED (type))
*strict_overflow_p = true;
return true;
}
CASE_FLT_FN (BUILT_IN_SQRT):
/* sqrt(-0.0) is -0.0. */
- if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
+ if (!HONOR_SIGNED_ZEROS (element_mode (type)))
return true;
return tree_expr_nonnegative_warnv_p (arg0,
strict_overflow_p);
{
case POINTER_PLUS_EXPR:
case PLUS_EXPR:
- if (TYPE_OVERFLOW_UNDEFINED (type))
+ if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
{
/* With the presence of negative values it is hard
to say something. */
{
struct symtab_node *symbol;
- symbol = symtab_node::get (base);
+ symbol = symtab_node::get_create (base);
if (symbol)
return symbol->nonzero_address ();
else
if (code == LE_EXPR || code == GT_EXPR)
{
- tree tem = op0;
- op0 = op1;
- op1 = tem;
+ std::swap (op0, op1);
code = swap_tree_comparison (code);
}
return value;
overflow_p = TREE_OVERFLOW (value);
- val &= ~(divisor - 1);
- val += divisor;
+ val += divisor - 1;
+ val &= - (int) divisor;
if (val == 0)
overflow_p = true;
t = build_int_cst (TREE_TYPE (value), divisor - 1);
value = size_binop_loc (loc, PLUS_EXPR, value, t);
- t = build_int_cst (TREE_TYPE (value), -divisor);
+ t = build_int_cst (TREE_TYPE (value), - (int) divisor);
value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
}
}
HOST_WIDE_INT *pbitpos, tree *poffset)
{
tree core;
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp, volatilep;
HOST_WIDE_INT bitsize;
location_t loc = EXPR_LOCATION (exp);
case MULT_EXPR:
case RDIV_EXPR:
- if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
+ if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
return NULL_TREE;
arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
}
return NULL_TREE;
}
+
+/* Return OFF converted to a pointer offset type suitable as offset for
+   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  The
+   canonical offset type for POINTER_PLUS_EXPR's second operand is
+   sizetype, so this is a plain fold_convert to sizetype.  */
+tree
+convert_to_ptrofftype_loc (location_t loc, tree off)
+{
+  return fold_convert_loc (loc, sizetype, off);
+}
+
+/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.
+   OFF is first converted to the pointer offset type (sizetype) via
+   convert_to_ptrofftype_loc, as POINTER_PLUS_EXPR requires; the
+   result has PTR's type.  */
+tree
+fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
+{
+  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
+			  ptr, convert_to_ptrofftype_loc (loc, off));
+}
+
+/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by the
+   host-side constant OFF.  OFF is wrapped as a sizetype INTEGER_CST
+   via size_int, matching the offset operand POINTER_PLUS_EXPR
+   expects; the result has PTR's type.  */
+tree
+fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
+{
+  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
+			  ptr, size_int (off));
+}