static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
-static int twoval_comparison_p (tree, tree *, tree *);
+static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
tree, tree, tree);
-static int simple_operand_p (const_tree);
+static bool simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
CASE_CFN_LLROUND:
CASE_CFN_LROUND:
CASE_CFN_ROUND:
+ CASE_CFN_ROUNDEVEN:
+ CASE_CFN_ROUNDEVEN_FN:
CASE_CFN_SIN:
CASE_CFN_SINH:
CASE_CFN_TAN:
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
int overflowable)
{
- bool success = false;
poly_wide_int poly_res;
tree type = TREE_TYPE (arg1);
signop sign = TYPE_SIGN (type);
{
wide_int warg1 = wi::to_wide (arg1), res;
wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
- success = wide_int_binop (res, code, warg1, warg2, sign, &overflow);
+ if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
+ return NULL_TREE;
poly_res = res;
}
- else if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
- success = poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow);
- if (success)
- return force_fit_type (type, poly_res, overflowable,
- (((sign == SIGNED || overflowable == -1)
- && overflow)
- | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
- return NULL_TREE;
+ else if (!poly_int_tree_p (arg1)
+ || !poly_int_tree_p (arg2)
+ || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
+ return NULL_TREE;
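+  /* Propagate any overflow already flagged on the operands, and flag
+     overflow from this operation when the caller cares: always for
+     signed types, otherwise only when OVERFLOWABLE is -1.  */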
+ return force_fit_type (type, poly_res, overflowable,
+ (((sign == SIGNED || overflowable == -1)
+ && overflow)
+ | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
/* Return true if binary operation OP distributes over addition in operand
addresses with TREE_CONSTANT flag set so we know that &var == &var
even if var is volatile. */
-int
-operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
+bool
+operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
+ unsigned int flags)
{
- /* When checking, verify at the outermost operand_equal_p call that
- if operand_equal_p returns non-zero then ARG0 and ARG1 has the same
- hash value. */
- if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
- {
- if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
- {
- if (arg0 != arg1)
- {
- inchash::hash hstate0 (0), hstate1 (0);
- inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
- inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
- hashval_t h0 = hstate0.end ();
- hashval_t h1 = hstate1.end ();
- gcc_assert (h0 == h1);
- }
- return 1;
- }
- else
- return 0;
- }
+ bool r;
+ if (verify_hash_value (arg0, arg1, flags, &r))
+ return r;
STRIP_ANY_LOCATION_WRAPPER (arg0);
STRIP_ANY_LOCATION_WRAPPER (arg1);
if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
|| TREE_TYPE (arg0) == error_mark_node
|| TREE_TYPE (arg1) == error_mark_node)
- return 0;
+ return false;
+
+  /* Similarly, if either does not have a type (like a template id),
+     they aren't equal.  */
+ if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
+ return false;
/* We cannot consider pointers to different address space equal. */
if (POINTER_TYPE_P (TREE_TYPE (arg0))
&& POINTER_TYPE_P (TREE_TYPE (arg1))
&& (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
!= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
- return 0;
+ return false;
/* Check equality of integer constants before bailing out due to
precision differences. */
if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
|| POINTER_TYPE_P (TREE_TYPE (arg0))
!= POINTER_TYPE_P (TREE_TYPE (arg1)))
- return 0;
+ return false;
/* If both types don't have the same precision, then it is not safe
to strip NOPs. */
if (element_precision (TREE_TYPE (arg0))
!= element_precision (TREE_TYPE (arg1)))
- return 0;
+ return false;
STRIP_NOPS (arg0);
STRIP_NOPS (arg1);
&& TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
&& TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
&& integer_zerop (TREE_OPERAND (arg0, 1)))
- return 1;
+ return true;
else if (TREE_CODE (arg1) == MEM_REF
&& DECL_P (arg0)
&& TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
&& TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
&& integer_zerop (TREE_OPERAND (arg1, 1)))
- return 1;
- return 0;
+ return true;
+ return false;
}
else
- return 0;
+ return false;
}
-/* When not checking adddresses, this is needed for conversions and for
+/* When not checking addresses, this is needed for conversions and for
|| TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
|| (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
&& !(flags & OEP_ADDRESS_OF)))
- return 0;
+ return false;
/* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
We don't care about side effects in that case because the SAVE_EXPR
&& (TREE_CODE (arg0) == SAVE_EXPR
|| (flags & OEP_MATCH_SIDE_EFFECTS)
|| (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
- return 1;
+ return true;
-  /* Next handle constant cases, those for which we can return 1 even
-     if ONLY_CONST is set.  */
+  /* Next handle constant cases, those for which we can return true even
+     if ONLY_CONST is set.  */
case REAL_CST:
if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
- return 1;
+ return true;
if (!HONOR_SIGNED_ZEROS (arg0))
/* If we do not distinguish between signed and unsigned zero,
consider them equal. */
if (real_zerop (arg0) && real_zerop (arg1))
- return 1;
+ return true;
}
- return 0;
+ return false;
case VECTOR_CST:
{
if (VECTOR_CST_LOG2_NPATTERNS (arg0)
!= VECTOR_CST_LOG2_NPATTERNS (arg1))
- return 0;
+ return false;
if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
!= VECTOR_CST_NELTS_PER_PATTERN (arg1))
- return 0;
+ return false;
unsigned int count = vector_cst_encoded_nelts (arg0);
for (unsigned int i = 0; i < count; ++i)
if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
VECTOR_CST_ENCODED_ELT (arg1, i), flags))
- return 0;
- return 1;
+ return false;
+ return true;
}
case COMPLEX_CST:
}
if (flags & OEP_ONLY_CONST)
- return 0;
+ return false;
/* Define macros to test an operand from arg0 and arg1 for equality and a
variant that allows null and views null as being different from any
case FIX_TRUNC_EXPR:
if (TYPE_UNSIGNED (TREE_TYPE (arg0))
!= TYPE_UNSIGNED (TREE_TYPE (arg1)))
- return 0;
+ return false;
break;
default:
break;
case tcc_comparison:
case tcc_binary:
if (OP_SAME (0) && OP_SAME (1))
- return 1;
+ return true;
/* For commutative ops, allow the other order. */
return (commutative_tree_code (TREE_CODE (arg0))
if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
&& (TREE_SIDE_EFFECTS (arg0)
|| TREE_SIDE_EFFECTS (arg1)))
- return 0;
+ return false;
switch (TREE_CODE (arg0))
{
{
if (TYPE_ALIGN (TREE_TYPE (arg0))
!= TYPE_ALIGN (TREE_TYPE (arg1)))
- return 0;
+ return false;
/* Verify that the access types are compatible. */
if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
!= TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
- return 0;
+ return false;
}
flags &= ~OEP_ADDRESS_OF;
return OP_SAME (0);
if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
TYPE_SIZE (TREE_TYPE (arg1)),
flags & ~OEP_ADDRESS_OF))
- return 0;
+ return false;
/* Fallthru. */
case REALPART_EXPR:
|| !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
TYPE_SIZE (TREE_TYPE (arg1)),
flags)))
- return 0;
+ return false;
/* Verify that access happens in similar types. */
if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
- return 0;
+ return false;
/* Verify that accesses are TBAA compatible. */
if (!alias_ptr_types_compatible_p
(TREE_TYPE (TREE_OPERAND (arg0, 1)),
!= MR_DEPENDENCE_CLIQUE (arg1))
|| (MR_DEPENDENCE_BASE (arg0)
!= MR_DEPENDENCE_BASE (arg1)))
- return 0;
+ return false;
/* Verify that alignment is compatible. */
if (TYPE_ALIGN (TREE_TYPE (arg0))
!= TYPE_ALIGN (TREE_TYPE (arg1)))
- return 0;
+ return false;
}
flags &= ~OEP_ADDRESS_OF;
return (OP_SAME (0) && OP_SAME (1)
case ARRAY_REF:
case ARRAY_RANGE_REF:
if (!OP_SAME (0))
- return 0;
+ return false;
flags &= ~OEP_ADDRESS_OF;
/* Compare the array index by value if it is constant first as we
may have different types but same value here. */
may be NULL when we're called to compare MEM_EXPRs. */
if (!OP_SAME_WITH_NULL (0)
|| !OP_SAME (1))
- return 0;
+ return false;
flags &= ~OEP_ADDRESS_OF;
return OP_SAME_WITH_NULL (2);
case BIT_FIELD_REF:
if (!OP_SAME (0))
- return 0;
+ return false;
flags &= ~OEP_ADDRESS_OF;
return OP_SAME (1) && OP_SAME (2);
+ /* Virtual table call. */
+ case OBJ_TYPE_REF:
+ {
+ if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
+ OBJ_TYPE_REF_EXPR (arg1), flags))
+ return false;
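+	  /* OBJ_TYPE_REF_TOKEN is the vtable slot index; calls through
+	     different slots are different.  */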
+ if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
+ != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
+ return false;
+ if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
+ OBJ_TYPE_REF_OBJECT (arg1), flags))
+ return false;
+ if (!types_same_for_odr (obj_type_ref_class (arg0),
+ obj_type_ref_class (arg1)))
+ return false;
+ return true;
+ }
+
default:
- return 0;
+ return false;
}
case tcc_expression:
case WIDEN_MULT_PLUS_EXPR:
case WIDEN_MULT_MINUS_EXPR:
if (!OP_SAME (2))
- return 0;
+ return false;
-      /* The multiplcation operands are commutative.  */
+      /* The multiplication operands are commutative.  */
/* FALLTHRU */
case TRUTH_OR_EXPR:
case TRUTH_XOR_EXPR:
if (OP_SAME (0) && OP_SAME (1))
- return 1;
+ return true;
/* Otherwise take into account this is a commutative operation. */
return (operand_equal_p (TREE_OPERAND (arg0, 0),
case COND_EXPR:
if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
- return 0;
+ return false;
flags &= ~OEP_ADDRESS_OF;
return OP_SAME (0);
case POSTINCREMENT_EXPR:
if (flags & OEP_LEXICOGRAPHIC)
return OP_SAME (0) && OP_SAME (1);
- return 0;
+ return false;
case CLEANUP_POINT_EXPR:
case EXPR_STMT:
case SAVE_EXPR:
if (flags & OEP_LEXICOGRAPHIC)
return OP_SAME (0);
- return 0;
+ return false;
default:
- return 0;
+ return false;
}
case tcc_vl_exp:
!= (CALL_EXPR_FN (arg1) == NULL_TREE))
-	/* If not both CALL_EXPRs are either internal or normal function
-	   functions, then they are not equal.  */
+	/* If the CALL_EXPRs are not both internal calls or both normal
+	   function calls, then they are not equal.  */
- return 0;
+ return false;
else if (CALL_EXPR_FN (arg0) == NULL_TREE)
{
/* If the CALL_EXPRs call different internal functions, then they
are not equal. */
if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
- return 0;
+ return false;
}
else
{
equal. */
if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
flags))
- return 0;
+ return false;
}
/* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
else
cef &= ECF_CONST;
if (!cef && !(flags & OEP_LEXICOGRAPHIC))
- return 0;
+ return false;
}
/* Now see if all the arguments are the same. */
a0 = next_const_call_expr_arg (&iter0),
a1 = next_const_call_expr_arg (&iter1))
if (! operand_equal_p (a0, a1, flags))
- return 0;
+ return false;
/* If we get here and both argument lists are exhausted
then the CALL_EXPRs are equal. */
return ! (a0 || a1);
}
default:
- return 0;
+ return false;
}
case tcc_declaration:
return (TREE_CODE (arg0) == FUNCTION_DECL
&& fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
&& DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
- && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
+ && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
+ == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
case tcc_exceptional:
if (TREE_CODE (arg0) == CONSTRUCTOR)
{
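+	  /* CONSTRUCTOR_NO_CLEARING determines whether members without an
+	     explicit initializer are zeroed or left undefined, so it
+	     affects the value.  */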
+ if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
+ return false;
+
/* In GIMPLE constructors are used only to build vectors from
elements. Individual elements in the constructor must be
indexed in increasing order and form an initial sequence.
constants). */
if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
|| !VECTOR_TYPE_P (TREE_TYPE (arg1)))
- return 0;
+ return false;
/* Be sure that vectors constructed have the same representation.
We only tested element precision and modes to match.
parts match. */
if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
- return 0;
+ return false;
vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
unsigned int len = vec_safe_length (v0);
if (len != vec_safe_length (v1))
- return 0;
+ return false;
for (unsigned int i = 0; i < len; i++)
{
positives for GENERIC. */
|| (c0->index
&& (TREE_CODE (c0->index) != INTEGER_CST
- || !compare_tree_int (c0->index, i)))
+ || compare_tree_int (c0->index, i)))
|| (c1->index
&& (TREE_CODE (c1->index) != INTEGER_CST
- || !compare_tree_int (c1->index, i))))
- return 0;
+ || compare_tree_int (c1->index, i))))
+ return false;
}
- return 1;
+ return true;
}
else if (TREE_CODE (arg0) == STATEMENT_LIST
&& (flags & OEP_LEXICOGRAPHIC))
{
/* The lists don't have the same number of statements. */
if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
- return 0;
+ return false;
if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
- return 1;
+ return true;
if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
flags & (OEP_LEXICOGRAPHIC
| OEP_NO_HASH_CHECK)))
- return 0;
+ return false;
}
}
- return 0;
+ return false;
case tcc_statement:
switch (TREE_CODE (arg0))
case RETURN_EXPR:
if (flags & OEP_LEXICOGRAPHIC)
return OP_SAME_WITH_NULL (0);
- return 0;
+ return false;
case DEBUG_BEGIN_STMT:
if (flags & OEP_LEXICOGRAPHIC)
- return 1;
- return 0;
+ return true;
+ return false;
default:
- return 0;
+ return false;
}
default:
- return 0;
+ return false;
}
#undef OP_SAME
#undef OP_SAME_WITH_NULL
+}
+
+/* Generate a hash value for an expression. This can be used iteratively
+ by passing a previous result as the HSTATE argument. */
+
+void
+operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
+ unsigned int flags)
+{
+ int i;
+ enum tree_code code;
+ enum tree_code_class tclass;
+
+ if (t == NULL_TREE || t == error_mark_node)
+ {
+ hstate.merge_hash (0);
+ return;
+ }
+
+ STRIP_ANY_LOCATION_WRAPPER (t);
+
+ if (!(flags & OEP_ADDRESS_OF))
+ STRIP_NOPS (t);
+
+ code = TREE_CODE (t);
+
+ switch (code)
+ {
+ /* Alas, constants aren't shared, so we can't rely on pointer
+ identity. */
+ case VOID_CST:
+ hstate.merge_hash (0);
+ return;
+ case INTEGER_CST:
+ gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
+ for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
+ hstate.add_hwi (TREE_INT_CST_ELT (t, i));
+ return;
+ case REAL_CST:
+ {
+ unsigned int val2;
+ if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
+ val2 = rvc_zero;
+ else
+ val2 = real_hash (TREE_REAL_CST_PTR (t));
+ hstate.merge_hash (val2);
+ return;
+ }
+ case FIXED_CST:
+ {
+ unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
+ hstate.merge_hash (val2);
+ return;
+ }
+ case STRING_CST:
+ hstate.add ((const void *) TREE_STRING_POINTER (t),
+ TREE_STRING_LENGTH (t));
+ return;
+ case COMPLEX_CST:
+ hash_operand (TREE_REALPART (t), hstate, flags);
+ hash_operand (TREE_IMAGPART (t), hstate, flags);
+ return;
+ case VECTOR_CST:
+ {
+ hstate.add_int (VECTOR_CST_NPATTERNS (t));
+ hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
+ unsigned int count = vector_cst_encoded_nelts (t);
+ for (unsigned int i = 0; i < count; ++i)
+ hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
+ return;
+ }
+ case SSA_NAME:
+ /* We can just compare by pointer. */
+ hstate.add_hwi (SSA_NAME_VERSION (t));
+ return;
+ case PLACEHOLDER_EXPR:
+ /* The node itself doesn't matter. */
+ return;
+ case BLOCK:
+ case OMP_CLAUSE:
+ /* Ignore. */
+ return;
+ case TREE_LIST:
+ /* A list of expressions, for a CALL_EXPR or as the elements of a
+ VECTOR_CST. */
+ for (; t; t = TREE_CHAIN (t))
+ hash_operand (TREE_VALUE (t), hstate, flags);
+ return;
+ case CONSTRUCTOR:
+ {
+ unsigned HOST_WIDE_INT idx;
+ tree field, value;
+ flags &= ~OEP_ADDRESS_OF;
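+	/* operand_equal_p distinguishes constructors that differ in
+	   CONSTRUCTOR_NO_CLEARING, so the hash must as well.  */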
+ hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
+ FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
+ {
+ /* In GIMPLE the indexes can be either NULL or matching i. */
+ if (field == NULL_TREE)
+ field = bitsize_int (idx);
+ hash_operand (field, hstate, flags);
+ hash_operand (value, hstate, flags);
+ }
+ return;
+ }
+ case STATEMENT_LIST:
+ {
+ tree_stmt_iterator i;
+ for (i = tsi_start (CONST_CAST_TREE (t));
+ !tsi_end_p (i); tsi_next (&i))
+ hash_operand (tsi_stmt (i), hstate, flags);
+ return;
+ }
+ case TREE_VEC:
+ for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
+ hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
+ return;
+ case IDENTIFIER_NODE:
+ hstate.add_object (IDENTIFIER_HASH_VALUE (t));
+ return;
+ case FUNCTION_DECL:
+ /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
+ Otherwise nodes that compare equal according to operand_equal_p might
+ get different hash codes. However, don't do this for machine specific
+ or front end builtins, since the function code is overloaded in those
+ cases. */
+ if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
+ && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
+ {
+ t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
+ code = TREE_CODE (t);
+ }
+ /* FALL THROUGH */
+ default:
+ if (POLY_INT_CST_P (t))
+ {
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
+ return;
+ }
+ tclass = TREE_CODE_CLASS (code);
+
+ if (tclass == tcc_declaration)
+ {
+	  /* DECLs have a unique ID.  */
+ hstate.add_hwi (DECL_UID (t));
+ }
+ else if (tclass == tcc_comparison && !commutative_tree_code (code))
+ {
+ /* For comparisons that can be swapped, use the lower
+ tree code. */
+ enum tree_code ccode = swap_tree_comparison (code);
+ if (code < ccode)
+ ccode = code;
+ hstate.add_object (ccode);
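+	  /* When CCODE is the swapped comparison, hash the operands in
+	     swapped order too; ccode != code is 1 exactly in that case.  */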
+ hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
+ hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
+ }
+ else if (CONVERT_EXPR_CODE_P (code))
+ {
+ /* NOP_EXPR and CONVERT_EXPR are considered equal by
+ operand_equal_p. */
+ enum tree_code ccode = NOP_EXPR;
+ hstate.add_object (ccode);
+
+ /* Don't hash the type, that can lead to having nodes which
+ compare equal according to operand_equal_p, but which
+ have different hash codes. Make sure to include signedness
+ in the hash computation. */
+ hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
+ hash_operand (TREE_OPERAND (t, 0), hstate, flags);
+ }
+ /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
+ else if (code == MEM_REF
+ && (flags & OEP_ADDRESS_OF) != 0
+ && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
+ && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
+ && integer_zerop (TREE_OPERAND (t, 1)))
+ hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
+ hstate, flags);
+ /* Don't ICE on FE specific trees, or their arguments etc.
+ during operand_equal_p hash verification. */
+ else if (!IS_EXPR_CODE_CLASS (tclass))
+ gcc_assert (flags & OEP_HASH_CHECK);
+ else
+ {
+ unsigned int sflags = flags;
+
+ hstate.add_object (code);
+
+ switch (code)
+ {
+ case ADDR_EXPR:
+ gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
+ flags |= OEP_ADDRESS_OF;
+ sflags = flags;
+ break;
+
+ case INDIRECT_REF:
+ case MEM_REF:
+ case TARGET_MEM_REF:
+ flags &= ~OEP_ADDRESS_OF;
+ sflags = flags;
+ break;
+
+ case ARRAY_REF:
+ case ARRAY_RANGE_REF:
+ case COMPONENT_REF:
+ case BIT_FIELD_REF:
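+	      /* Operand 0 (the base) keeps the incoming flags, which may
+		 include OEP_ADDRESS_OF; indexes and sizes are hashed by
+		 value, so only SFLAGS drops it.  */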
+ sflags &= ~OEP_ADDRESS_OF;
+ break;
+
+ case COND_EXPR:
+ flags &= ~OEP_ADDRESS_OF;
+ break;
+
+ case WIDEN_MULT_PLUS_EXPR:
+ case WIDEN_MULT_MINUS_EXPR:
+ {
+ /* The multiplication operands are commutative. */
+ inchash::hash one, two;
+ hash_operand (TREE_OPERAND (t, 0), one, flags);
+ hash_operand (TREE_OPERAND (t, 1), two, flags);
+ hstate.add_commutative (one, two);
+ hash_operand (TREE_OPERAND (t, 2), two, flags);
+ return;
+ }
+
+ case CALL_EXPR:
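+	    /* Internal calls have no CALL_EXPR_FN; hash the internal
+	       function code instead.  */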
+ if (CALL_EXPR_FN (t) == NULL_TREE)
+ hstate.add_int (CALL_EXPR_IFN (t));
+ break;
+
+ case TARGET_EXPR:
+ /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
+		 Usually different TARGET_EXPRs should just use
+ different temporaries in their slots. */
+ hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
+ return;
+
+ /* Virtual table call. */
+ case OBJ_TYPE_REF:
+ inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
+ inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
+ inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
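+	      /* The ODR class comparison done by operand_equal_p is not
+		 hashed; a coarser hash is still consistent.  */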
+ return;
+ default:
+ break;
+ }
+
+ /* Don't hash the type, that can lead to having nodes which
+ compare equal according to operand_equal_p, but which
+ have different hash codes. */
+ if (code == NON_LVALUE_EXPR)
+ {
+	  /* Make sure to include signedness in the hash computation.  */
+ hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
+ hash_operand (TREE_OPERAND (t, 0), hstate, flags);
+ }
+
+ else if (commutative_tree_code (code))
+ {
+ /* It's a commutative expression. We want to hash it the same
+ however it appears. We do this by first hashing both operands
+ and then rehashing based on the order of their independent
+ hashes. */
+ inchash::hash one, two;
+ hash_operand (TREE_OPERAND (t, 0), one, flags);
+ hash_operand (TREE_OPERAND (t, 1), two, flags);
+ hstate.add_commutative (one, two);
+ }
+ else
+ for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
+ hash_operand (TREE_OPERAND (t, i), hstate,
+ i == 0 ? flags : sflags);
+ }
+ return;
+ }
+}
+
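+/* If hash verification is enabled, compare ARG0 and ARG1, assert that
+   operands which compare equal hash identically, store the comparison
+   result in *RET and return true.  Return false when the caller must
+   perform the comparison itself.  */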
+bool
+operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
+ unsigned int flags, bool *ret)
+{
+  /* When checking, verify at the outermost operand_equal_p call that
+     if operand_equal_p returns true then ARG0 and ARG1 have the same
+     hash value.  */
+ if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
+ {
+ if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
+ {
+ if (arg0 != arg1)
+ {
+ inchash::hash hstate0 (0), hstate1 (0);
+ hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
+ hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
+ hashval_t h0 = hstate0.end ();
+ hashval_t h1 = hstate1.end ();
+ gcc_assert (h0 == h1);
+ }
+ *ret = true;
+ }
+ else
+ *ret = false;
+
+ return true;
+ }
+
+ return false;
+}
+
+
+static operand_compare default_compare_instance;
+
+/* Convenience wrapper around the operand_compare class because usually we
+   do not need to play with the valueizer.  */
+
+bool
+operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
+{
+ return default_compare_instance.operand_equal_p (arg0, arg1, flags);
+}
+
+namespace inchash
+{
+
+/* Generate a hash value for an expression. This can be used iteratively
+ by passing a previous result as the HSTATE argument.
+
+ This function is intended to produce the same hash for expressions which
+ would compare equal using operand_equal_p. */
+void
+add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
+{
+ default_compare_instance.hash_operand (t, hstate, flags);
+}
+
}
\f
/* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
-   If this is true, return 1.  Otherwise, return zero.  */
+   If this is true, return true.  Otherwise, return false.  */
-static int
+static bool
twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
{
enum tree_code code = TREE_CODE (arg);
&& twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
case tcc_constant:
- return 1;
+ return true;
case tcc_expression:
if (code == COND_EXPR)
return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
&& twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
&& twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
- return 0;
+ return false;
case tcc_comparison:
/* First see if we can handle the first operand, then the second. For
if (operand_equal_p (TREE_OPERAND (arg, 0),
TREE_OPERAND (arg, 1), 0))
- return 0;
+ return false;
if (*cval1 == 0)
*cval1 = TREE_OPERAND (arg, 0);
else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
;
else
- return 0;
+ return false;
if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
;
else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
;
else
- return 0;
+ return false;
- return 1;
+ return true;
default:
- return 0;
+ return false;
}
}
\f
-/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
-   bit positions and MASK is SIGNED.  */
+/* Return true if MASK represents a mask of SIZE ones in the low-order
+   bit positions and MASK is SIGNED.  */
-static int
+static bool
all_ones_mask_p (const_tree mask, unsigned int size)
{
tree type = TREE_TYPE (mask);
/* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
to be evaluated unconditionally. */
-static int
+static bool
simple_operand_p (const_tree exp)
{
/* Strip any conversions that don't change the machine mode. */
{
enum tree_code code;
- if (TREE_SIDE_EFFECTS (exp)
- || tree_could_trap_p (exp))
+ if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
return false;
while (CONVERT_EXPR_P (exp))
/* First make sure that arithmetics in this type is valid, then make sure
that it wraps around. */
if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
- etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
- TYPE_UNSIGNED (etype));
+ etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
- if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
+ if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
{
tree utype, minv, maxv;
else
return NULL_TREE;
}
+ else if (POINTER_TYPE_P (etype))
+ etype = unsigned_type_for (etype);
return etype;
}
if (etype == NULL_TREE)
return NULL_TREE;
- if (POINTER_TYPE_P (etype))
- etype = unsigned_type_for (etype);
-
high = fold_convert_loc (loc, etype, high);
low = fold_convert_loc (loc, etype, low);
exp = fold_convert_loc (loc, etype, exp);
apply the distributive law to commute the multiply and addition
if the multiplication of the constants doesn't overflow
and overflow is defined. With undefined overflow
- op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
- if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
+ op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
+ But fold_plusminus_mult_expr would factor back any power-of-two
+     value, so do not distribute in the first place in this case.  */
+ if (code == MULT_EXPR
+ && TYPE_OVERFLOW_WRAPS (ctype)
+ && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
return fold_build2 (tcode, ctype,
fold_build2 (code, ctype,
fold_convert (ctype, op0),
return false;
/* Don't allow the fold with -fsignaling-nans. */
- if (HONOR_SNANS (element_mode (type)))
+ if (HONOR_SNANS (type))
return false;
/* Allow the fold if zeros aren't signed, or their sign isn't important. */
- if (!HONOR_SIGNED_ZEROS (element_mode (type)))
+ if (!HONOR_SIGNED_ZEROS (type))
return true;
+ /* There is no case that is safe for all rounding modes. */
+ if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
+ return false;
+
/* In a vector or complex, we would need to check the sign of all zeros. */
- if (TREE_CODE (addend) != REAL_CST)
+ if (TREE_CODE (addend) == VECTOR_CST)
+ addend = uniform_vector_p (addend);
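+  /* uniform_vector_p yields the replicated element when all elements of
+     ADDEND are equal, or NULL_TREE otherwise.  */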
+ if (!addend || TREE_CODE (addend) != REAL_CST)
return false;
/* Treat x + -0 as x - 0 and x - -0 as x + 0. */
/* The mode has signed zeros, and we have to honor their sign.
In this situation, there is only one case we can return true for.
- X - 0 is the same as X unless rounding towards -infinity is
- supported. */
- return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
+ X - 0 is the same as X with default rounding. */
+ return negate;
}
/* Subroutine of match.pd that optimizes comparisons of a division by
/* No identical multiplicands; see if we can find a common
power-of-two factor in non-power-of-two multiplies. This
can help in multi-dimensional array access. */
- else if (tree_fits_shwi_p (arg01)
- && tree_fits_shwi_p (arg11))
+ else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
{
- HOST_WIDE_INT int01, int11, tmp;
+ HOST_WIDE_INT int01 = tree_to_shwi (arg01);
+ HOST_WIDE_INT int11 = tree_to_shwi (arg11);
+ HOST_WIDE_INT tmp;
bool swap = false;
tree maybe_same;
- int01 = tree_to_shwi (arg01);
- int11 = tree_to_shwi (arg11);
/* Move min of absolute values to int11. */
if (absu_hwi (int01) < absu_hwi (int11))
else
maybe_same = arg11;
- if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
+ const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
+ if (factor > 1
+ && pow2p_hwi (factor)
+ && (int01 & (factor - 1)) == 0
/* The remainder should not be a constant, otherwise we
end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
increased the number of multiplications necessary. */
selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
NULL_TREE otherwise. */
-static tree
+tree
fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
{
unsigned int i;
tree fndecl = get_callee_fndecl (t);
if (!fndecl) return false;
if (flag_delete_null_pointer_checks && !flag_check_new
- && DECL_IS_OPERATOR_NEW (fndecl)
+ && DECL_IS_OPERATOR_NEW_P (fndecl)
&& !TREE_NOTHROW (fndecl))
return true;
if (flag_delete_null_pointer_checks
return NULL_TREE;
case VEC_PERM_EXPR:
- if (TREE_CODE (arg2) == VECTOR_CST)
+      /* Perform constant folding of VEC_PERM_EXPR.  */
+ if (TREE_CODE (arg2) == VECTOR_CST
+ && TREE_CODE (op0) == VECTOR_CST
+ && TREE_CODE (op1) == VECTOR_CST)
{
/* Build a vector of integers from the tree mask. */
vec_perm_builder builder;
poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
bool single_arg = (op0 == op1);
vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
-
- /* Check for cases that fold to OP0 or OP1 in their original
- element order. */
- if (sel.series_p (0, 1, 0, 1))
- return op0;
- if (sel.series_p (0, 1, nelts, 1))
- return op1;
-
- if (!single_arg)
- {
- if (sel.all_from_input_p (0))
- op1 = op0;
- else if (sel.all_from_input_p (1))
- {
- op0 = op1;
- sel.rotate_inputs (1);
- }
- }
-
- if ((TREE_CODE (op0) == VECTOR_CST
- || TREE_CODE (op0) == CONSTRUCTOR)
- && (TREE_CODE (op1) == VECTOR_CST
- || TREE_CODE (op1) == CONSTRUCTOR))
- {
- tree t = fold_vec_perm (type, op0, op1, sel);
- if (t != NULL_TREE)
- return t;
- }
-
- bool changed = (op0 == op1 && !single_arg);
-
- /* Generate a canonical form of the selector. */
- if (arg2 == op2 && sel.encoding () != builder)
- {
- /* Some targets are deficient and fail to expand a single
- argument permutation while still allowing an equivalent
- 2-argument version. */
- if (sel.ninputs () == 2
- || can_vec_perm_const_p (TYPE_MODE (type), sel, false))
- op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
- else
- {
- vec_perm_indices sel2 (builder, 2, nelts);
- if (can_vec_perm_const_p (TYPE_MODE (type), sel2, false))
- op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel2);
- else
- /* Not directly supported with either encoding,
- so use the preferred form. */
- op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
- }
- changed = true;
- }
-
- if (changed)
- return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
+ return fold_vec_perm (type, op0, op1, sel);
}
return NULL_TREE;
}
/* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
- of an array (or vector). */
+   of an array (or vector).  *CTOR_IDX, if non-NULL, is updated with the
+   constructor element index of the value returned.  If the element is
+   not found, NULL_TREE is returned and *CTOR_IDX is updated to
+   the index of the element after the ACCESS_INDEX position (which
+   may be outside of the CTOR array).  */
tree
-get_array_ctor_element_at_index (tree ctor, offset_int access_index)
+get_array_ctor_element_at_index (tree ctor, offset_int access_index,
+ unsigned *ctor_idx)
{
tree index_type = NULL_TREE;
+ signop index_sgn = UNSIGNED;
offset_int low_bound = 0;
if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
-    /* Static constructors for variably sized objects makes no sense.  */
+    /* Static constructors for variably sized objects make no sense.  */
gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
- low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
+ /* ??? When it is obvious that the range is signed, treat it so. */
+ if (TYPE_UNSIGNED (index_type)
+ && TYPE_MAX_VALUE (domain_type)
+ && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
+ TYPE_MIN_VALUE (domain_type)))
+ {
+ index_sgn = SIGNED;
+ low_bound
+ = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
+ SIGNED);
+ }
+ else
+ {
+ index_sgn = TYPE_SIGN (index_type);
+ low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
+ }
}
}
if (index_type)
access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
- TYPE_SIGN (index_type));
+ index_sgn);
- offset_int index = low_bound - 1;
+ offset_int index = low_bound;
if (index_type)
- index = wi::ext (index, TYPE_PRECISION (index_type),
- TYPE_SIGN (index_type));
+ index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
- offset_int max_index;
- unsigned HOST_WIDE_INT cnt;
+ offset_int max_index = index;
+ unsigned cnt;
tree cfield, cval;
+ bool first_p = true;
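+  /* The first element without an explicit index gets the domain's low
+     bound; each later one gets MAX_INDEX + 1.  */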
FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
{
if (cfield)
{
if (TREE_CODE (cfield) == INTEGER_CST)
- max_index = index = wi::to_offset (cfield);
+ max_index = index
+ = offset_int::from (wi::to_wide (cfield), index_sgn);
else
{
gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
- index = wi::to_offset (TREE_OPERAND (cfield, 0));
- max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
+ index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
+ index_sgn);
+ max_index
+ = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
+ index_sgn);
+ gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
}
}
- else
+ else if (!first_p)
{
- index += 1;
+ index = max_index + 1;
if (index_type)
- index = wi::ext (index, TYPE_PRECISION (index_type),
- TYPE_SIGN (index_type));
+ index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
+ gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
max_index = index;
}
+ else
+ first_p = false;
- /* Do we have match? */
- if (wi::cmpu (access_index, index) >= 0
- && wi::cmpu (access_index, max_index) <= 0)
- return cval;
- }
+      /* Do we have a match?  */
+ if (wi::cmp (access_index, index, index_sgn) >= 0)
+ {
+ if (wi::cmp (access_index, max_index, index_sgn) <= 0)
+ {
+ if (ctor_idx)
+ *ctor_idx = cnt;
+ return cval;
+ }
+ }
+ else if (in_gimple_form)
+	/* We're past the element we search for.  Note that during parsing
+	   the elements might not be sorted.
+ ??? We should use a binary search and a flag on the
+ CONSTRUCTOR as to whether elements are sorted in declaration
+ order. */
+ break;
+ }
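+  /* Not found: CNT is the index of the element after the ACCESS_INDEX
+     position, possibly one past the end of the constructor array.  */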
+ if (ctor_idx)
+ *ctor_idx = cnt;
return NULL_TREE;
}
CASE_CFN_RINT_FN:
CASE_CFN_ROUND:
CASE_CFN_ROUND_FN:
+ CASE_CFN_ROUNDEVEN:
+ CASE_CFN_ROUNDEVEN_FN:
CASE_CFN_SCALB:
CASE_CFN_SCALBLN:
CASE_CFN_SCALBN:
CASE_CFN_RINT_FN:
CASE_CFN_ROUND:
CASE_CFN_ROUND_FN:
+ CASE_CFN_ROUNDEVEN:
+ CASE_CFN_ROUNDEVEN_FN:
CASE_CFN_TRUNC:
CASE_CFN_TRUNC_FN:
return true;
return NULL;
}
+/* Fold a read from the element at index IDX of vector ARG.  */
+
+tree
+fold_read_from_vector (tree arg, poly_uint64 idx)
+{
+ unsigned HOST_WIDE_INT i;
+ if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
+ && known_ge (idx, 0u)
+ && idx.is_constant (&i))
+ {
+ if (TREE_CODE (arg) == VECTOR_CST)
+ return VECTOR_CST_ELT (arg, i);
+ else if (TREE_CODE (arg) == CONSTRUCTOR)
+ {
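+	  /* Elements not explicitly present in a GIMPLE vector
+	     CONSTRUCTOR are implicitly zero.  */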
+ if (i >= CONSTRUCTOR_NELTS (arg))
+ return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
+ return CONSTRUCTOR_ELT (arg, i)->value;
+ }
+ }
+ return NULL_TREE;
+}
+
/* Return the tree for neg (ARG0) when ARG0 is known to be either
an integer constant, real, or fixed-point constant.
{
tree elem0 = VECTOR_CST_ELT (op0, i);
tree elem1 = VECTOR_CST_ELT (op1, i);
- tree tmp = fold_relational_const (code, type, elem0, elem1);
+ tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
if (tmp == NULL_TREE)
return NULL_TREE;
if (integer_zerop (tmp))
- return constant_boolean_node (false, type);
+ return constant_boolean_node (code == NE_EXPR, type);
}
- return constant_boolean_node (true, type);
+ return constant_boolean_node (code == EQ_EXPR, type);
}
tree_vector_builder elts;
if (!elts.new_binary_operation (type, op0, op1, false))
tree type = build_vector_type (inner_type, 4);
tree zero = build_zero_cst (type);
tree one = build_one_cst (type);
+ tree index = build_index_vector (type, 0, 1);
/* Verify equality tests that return a scalar boolean result. */
tree res_type = boolean_type_node;
ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
+ ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
+ ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
+ index, one)));
+ ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
+ index, index)));
+ ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
+ index, index)));
}
/* Verify folding of VEC_DUPLICATE_EXPRs. */