+2017-12-20 Richard Sandiford <richard.sandiford@linaro.org>
+ Alan Hayward <alan.hayward@arm.com>
+ David Sherwood <david.sherwood@arm.com>
+
+ * doc/generic.texi (POLY_INT_CST): Document.
+ * tree.def (POLY_INT_CST): New tree code.
+ * treestruct.def (TS_POLY_INT_CST): New tree layout.
+ * tree-core.h (tree_poly_int_cst): New struct.
+ (tree_node): Add a poly_int_cst field.
+ * tree.h (POLY_INT_CST_P, POLY_INT_CST_COEFF): New macros.
+ (wide_int_to_tree, force_fit_type): Take a poly_wide_int_ref
+ instead of a wide_int_ref.
+ (build_int_cst, build_int_cst_type): Take a poly_int64 instead
+ of a HOST_WIDE_INT.
+ (build_int_cstu, build_array_type_nelts): Take a poly_uint64
+ instead of an unsigned HOST_WIDE_INT.
+ (build_poly_int_cst, tree_fits_poly_int64_p, tree_fits_poly_uint64_p)
+ (ptrdiff_tree_p): Declare.
+ (tree_to_poly_int64, tree_to_poly_uint64): Likewise. Provide
+ extern inline implementations if the target doesn't use POLY_INT_CST.
+ (poly_int_tree_p): New function.
+ (wi::unextended_tree): New class.
+ (wi::int_traits <unextended_tree>): New override.
+ (wi::extended_tree): Add a default constructor.
+ (wi::extended_tree::get_tree): New function.
+ (wi::widest_extended_tree, wi::offset_extended_tree): New typedefs.
+ (wi::tree_to_widest_ref, wi::tree_to_offset_ref): Use them.
+ (wi::tree_to_poly_widest_ref, wi::tree_to_poly_offset_ref)
+ (wi::tree_to_poly_wide_ref): New typedefs.
+ (wi::ints_for): Provide overloads for extended_tree and
+ unextended_tree.
+ (poly_int_cst_value, wi::to_poly_widest, wi::to_poly_offset)
+ (wi::to_wide): New functions.
+ (wi::fits_to_boolean_p, wi::fits_to_tree_p): Handle poly_ints.
+ * tree.c (poly_int_cst_hasher): New struct.
+ (poly_int_cst_hash_table): New variable.
+ (tree_node_structure_for_code, tree_code_size, simple_cst_equal)
+ (valid_constant_size_p, add_expr, drop_tree_overflow): Handle
+ POLY_INT_CST.
+ (initialize_tree_contains_struct): Handle TS_POLY_INT_CST.
+ (init_ttree): Initialize poly_int_cst_hash_table.
+ (build_int_cst, build_int_cst_type, build_invariant_address): Take
+ a poly_int64 instead of a HOST_WIDE_INT.
+ (build_int_cstu, build_array_type_nelts): Take a poly_uint64
+ instead of an unsigned HOST_WIDE_INT.
+ (wide_int_to_tree): Rename to...
+ (wide_int_to_tree_1): ...this.
+ (build_new_poly_int_cst, build_poly_int_cst): New functions.
+ (force_fit_type): Take a poly_wide_int_ref instead of a wide_int_ref.
+ (wide_int_to_tree): New function that takes a poly_wide_int_ref.
+ (ptrdiff_tree_p, tree_to_poly_int64, tree_to_poly_uint64)
+ (tree_fits_poly_int64_p, tree_fits_poly_uint64_p): New functions.
+ * lto-streamer-out.c (DFS::DFS_write_tree_body, hash_tree): Handle
+ TS_POLY_INT_CST.
+	* tree-streamer-in.c (lto_input_ts_poly_tree_pointers): New function.
+	(streamer_read_tree_body): Use it.
+	* tree-streamer-out.c (write_ts_poly_tree_pointers): New function.
+	(streamer_write_tree_body): Use it.
+ * tree-streamer.c (streamer_check_handled_ts_structures): Likewise.
+ * asan.c (asan_protect_global): Require the size to be an INTEGER_CST.
+ * cfgexpand.c (expand_debug_expr): Handle POLY_INT_CST.
+ * expr.c (expand_expr_real_1, const_vector_from_tree): Likewise.
+ * gimple-expr.h (is_gimple_constant): Likewise.
+ * gimplify.c (maybe_with_size_expr): Likewise.
+ * print-tree.c (print_node): Likewise.
+ * tree-data-ref.c (data_ref_compare_tree): Likewise.
+ * tree-pretty-print.c (dump_generic_node): Likewise.
+ * tree-ssa-address.c (addr_for_mem_ref): Likewise.
+ * tree-vect-data-refs.c (dr_group_sort_cmp): Likewise.
+ * tree-vrp.c (compare_values_warnv): Likewise.
+ * tree-ssa-loop-ivopts.c (determine_base_object, constant_multiple_of)
+ (get_loop_invariant_expr, add_candidate_1, get_computation_aff_1)
+ (force_expr_to_var_cost): Likewise.
+ * tree-ssa-loop.c (for_each_index): Likewise.
+ * fold-const.h (build_invariant_address, size_int_kind): Take a
+ poly_int64 instead of a HOST_WIDE_INT.
+ * fold-const.c (fold_negate_expr_1, const_binop, const_unop)
+ (fold_convert_const, multiple_of_p, fold_negate_const): Handle
+ POLY_INT_CST.
+ (size_binop_loc): Likewise. Allow int_const_binop_1 to fail.
+ (int_const_binop_2): New function, split out from...
+ (int_const_binop_1): ...here. Handle POLY_INT_CST.
+ (size_int_kind): Take a poly_int64 instead of a HOST_WIDE_INT.
+ * expmed.c (make_tree): Handle CONST_POLY_INT_P.
+ * gimple-ssa-strength-reduction.c (slsr_process_add)
+ (slsr_process_mul): Check for INTEGER_CSTs before using them
+ as candidates.
+ * stor-layout.c (bits_from_bytes): New function.
+ (bit_from_pos): Use it.
+ (layout_type): Likewise. For vectors, multiply the TYPE_SIZE_UNIT
+ by BITS_PER_UNIT to get the TYPE_SIZE.
+ * tree-cfg.c (verify_expr, verify_types_in_gimple_reference): Allow
+ MEM_REF and TARGET_MEM_REF offsets to be a POLY_INT_CST.
+
2017-12-20 Richard Sandiford <richard.sandiford@linaro.org>
Alan Hayward <alan.hayward@arm.com>
David Sherwood <david.sherwood@arm.com>
&& !section_sanitized_p (DECL_SECTION_NAME (decl)))
|| DECL_SIZE (decl) == 0
|| ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
+ || TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
|| !valid_constant_size_p (DECL_SIZE_UNIT (decl))
|| DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
|| TREE_TYPE (decl) == ubsan_get_source_location_type ()
op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
return op0;
+ case POLY_INT_CST:
+ return immed_wide_int_const (poly_int_cst_value (exp), mode);
+
case COMPLEX_CST:
gcc_assert (COMPLEX_MODE_P (mode));
op0 = expand_debug_expr (TREE_REALPART (exp));
@tindex COMPLEX_CST
@tindex VECTOR_CST
@tindex STRING_CST
+@tindex POLY_INT_CST
@findex TREE_STRING_LENGTH
@findex TREE_STRING_POINTER
FIXME: The formats of string constants are not well-defined when the
target system bytes are not the same width as host system bytes.
+@item POLY_INT_CST
+These nodes represent invariants that depend on some target-specific
+runtime parameters. They consist of @code{NUM_POLY_INT_COEFFS}
+coefficients, with the first coefficient being the constant term and
+the others being multipliers that are applied to the runtime parameters.
+
+@code{POLY_INT_CST_COEFF (@var{x}, @var{i})} references coefficient number
+@var{i} of @code{POLY_INT_CST} node @var{x}. Each coefficient is an
+@code{INTEGER_CST}.
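+
+For example, if @code{NUM_POLY_INT_COEFFS} is 2, the byte size of a
+variable-length vector might be the runtime invariant @samp{16 + 16x},
+where @var{x} is a runtime parameter.  Such a size would be represented
+by a @code{POLY_INT_CST} whose two coefficients are both 16.  As an
+illustrative sketch, the coefficients of such a node @var{t} could be
+printed with:
+
+@smallexample
+for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+  print_dec (wi::to_wide (POLY_INT_CST_COEFF (t, i)), stderr, SIGNED);
+@end smallexample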
+
@end table
@node Storage References
/* fall through. */
default:
+ if (CONST_POLY_INT_P (x))
+	return wide_int_to_tree (type, const_poly_int_value (x));
+
t = build_decl (RTL_LOCATION (x), VAR_DECL, NULL_TREE, type);
/* If TYPE is a POINTER_TYPE, we might need to convert X from
copy_rtx (XEXP (temp, 0)));
return temp;
+ case POLY_INT_CST:
+ return immed_wide_int_const (poly_int_cst_value (exp), mode);
+
case SAVE_EXPR:
{
tree val = treeop0;
RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
inner);
else
- RTVEC_ELT (v, i) = immed_wide_int_const (wi::to_wide (elt), inner);
+ RTVEC_ELT (v, i) = immed_wide_int_const (wi::to_poly_wide (elt),
+ inner);
}
return gen_rtx_CONST_VECTOR (mode, v);
return tem;
break;
+ case POLY_INT_CST:
case REAL_CST:
- tem = fold_negate_const (t, type);
- return tem;
-
case FIXED_CST:
tem = fold_negate_const (t, type);
return tem;
&& TYPE_MODE (type1) == TYPE_MODE (type2);
}
-
-/* Combine two integer constants PARG1 and PARG2 under operation CODE
- to produce a new constant. Return NULL_TREE if we don't know how
- to evaluate CODE at compile-time. */
+/* Subroutine of int_const_binop_1 that handles two INTEGER_CSTs. */
static tree
-int_const_binop_1 (enum tree_code code, const_tree parg1, const_tree parg2,
+int_const_binop_2 (enum tree_code code, const_tree parg1, const_tree parg2,
int overflowable)
{
wide_int res;
return t;
}
+/* Combine two integer constants PARG1 and PARG2 under operation CODE
+ to produce a new constant. Return NULL_TREE if we don't know how
+ to evaluate CODE at compile-time. */
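+/* For example, with NUM_POLY_INT_COEFFS == 2, a PLUS_EXPR on the
+   POLY_INT_CSTs [2, 2] (i.e. 2 + 2x) and [3, 1] (i.e. 3 + x) would
+   yield [5, 3] (i.e. 5 + 3x), while a MULT_EXPR on two POLY_INT_CSTs
+   returns NULL_TREE, since the product would no longer be a linear
+   function of the runtime parameter.  */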
+
+static tree
+int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
+ int overflowable)
+{
+ if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
+ return int_const_binop_2 (code, arg1, arg2, overflowable);
+
+ gcc_assert (NUM_POLY_INT_COEFFS != 1);
+
+ if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
+ {
+ poly_wide_int res;
+      bool overflow = false;
+ tree type = TREE_TYPE (arg1);
+ signop sign = TYPE_SIGN (type);
+ switch (code)
+ {
+ case PLUS_EXPR:
+ res = wi::add (wi::to_poly_wide (arg1),
+ wi::to_poly_wide (arg2), sign, &overflow);
+ break;
+
+ case MINUS_EXPR:
+ res = wi::sub (wi::to_poly_wide (arg1),
+ wi::to_poly_wide (arg2), sign, &overflow);
+ break;
+
+ case MULT_EXPR:
+ if (TREE_CODE (arg2) == INTEGER_CST)
+ res = wi::mul (wi::to_poly_wide (arg1),
+ wi::to_wide (arg2), sign, &overflow);
+ else if (TREE_CODE (arg1) == INTEGER_CST)
+ res = wi::mul (wi::to_poly_wide (arg2),
+ wi::to_wide (arg1), sign, &overflow);
+ else
+ return NULL_TREE;
+ break;
+
+ case LSHIFT_EXPR:
+ if (TREE_CODE (arg2) == INTEGER_CST)
+ res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
+ else
+ return NULL_TREE;
+ break;
+
+ case BIT_IOR_EXPR:
+ if (TREE_CODE (arg2) != INTEGER_CST
+ || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
+ &res))
+ return NULL_TREE;
+ break;
+
+ default:
+ return NULL_TREE;
+ }
+ return force_fit_type (type, res, overflowable,
+ (((sign == SIGNED || overflowable == -1)
+ && overflow)
+ | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
+ }
+
+ return NULL_TREE;
+}
+
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
STRIP_NOPS (arg1);
STRIP_NOPS (arg2);
- if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
+ if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
{
if (code == POINTER_PLUS_EXPR)
return int_const_binop (PLUS_EXPR,
case BIT_NOT_EXPR:
if (TREE_CODE (arg0) == INTEGER_CST)
return fold_not_const (arg0, type);
+      else if (POLY_INT_CST_P (arg0))
+        /* ~X is -X - 1, which is still a linear function of X.  */
+        return wide_int_to_tree (type, -poly_int_cst_value (arg0) - 1);
/* Perform BIT_NOT_EXPR on each element individually. */
else if (TREE_CODE (arg0) == VECTOR_CST)
{
indicates which particular sizetype to create. */
tree
-size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
+size_int_kind (poly_int64 number, enum size_type_kind kind)
{
return build_int_cst (sizetype_tab[(int) kind], number);
}
gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
TREE_TYPE (arg1)));
- /* Handle the special case of two integer constants faster. */
- if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
+ /* Handle the special case of two poly_int constants faster. */
+ if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
{
/* And some specific cases even faster than that. */
if (code == PLUS_EXPR)
/* Handle general case of two integer constants. For sizetype
constant calculations we always want to know about overflow,
even in the unsigned case. */
- return int_const_binop_1 (code, arg0, arg1, -1);
+ tree res = int_const_binop_1 (code, arg0, arg1, -1);
+ if (res != NULL_TREE)
+ return res;
}
return fold_build2_loc (loc, code, type, arg0, arg1);
static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
- if (TREE_TYPE (arg1) == type)
+ tree arg_type = TREE_TYPE (arg1);
+ if (arg_type == type)
return arg1;
+ /* We can't widen types, since the runtime value could overflow the
+ original type before being extended to the new type. */
+ if (POLY_INT_CST_P (arg1)
+ && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
+ && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
+ return build_poly_int_cst (type,
+ poly_wide_int::from (poly_int_cst_value (arg1),
+ TYPE_PRECISION (type),
+ TYPE_SIGN (arg_type)));
+
if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
|| TREE_CODE (type) == OFFSET_TYPE)
{
/* fall through */
default:
+ if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
+ return multiple_p (wi::to_poly_widest (top),
+ wi::to_poly_widest (bottom));
+
return 0;
}
}
switch (TREE_CODE (arg0))
{
- case INTEGER_CST:
- {
- bool overflow;
- wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
- t = force_fit_type (type, val, 1,
- (overflow && ! TYPE_UNSIGNED (type))
- || TREE_OVERFLOW (arg0));
- break;
- }
-
case REAL_CST:
t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
break;
}
default:
+ if (poly_int_tree_p (arg0))
+ {
+ bool overflow;
+ poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
+ t = force_fit_type (type, res, 1,
+ (overflow && ! TYPE_UNSIGNED (type))
+ || TREE_OVERFLOW (arg0));
+ break;
+ }
+
gcc_unreachable ();
}
#define build_simple_mem_ref(T)\
build_simple_mem_ref_loc (UNKNOWN_LOCATION, T)
extern offset_int mem_ref_offset (const_tree);
-extern tree build_invariant_address (tree, tree, HOST_WIDE_INT);
+extern tree build_invariant_address (tree, tree, poly_int64);
extern tree constant_boolean_node (bool, tree);
extern tree div_if_zero_remainder (const_tree, const_tree);
extern tree round_up_loc (location_t, tree, unsigned int);
#define round_down(T,N) round_down_loc (UNKNOWN_LOCATION, T, N)
extern tree round_down_loc (location_t, tree, int);
-extern tree size_int_kind (HOST_WIDE_INT, enum size_type_kind);
+extern tree size_int_kind (poly_int64, enum size_type_kind);
#define size_binop(CODE,T1,T2)\
size_binop_loc (UNKNOWN_LOCATION, CODE, T1, T2)
extern tree size_binop_loc (location_t, enum tree_code, tree, tree);
switch (TREE_CODE (t))
{
case INTEGER_CST:
+ case POLY_INT_CST:
case REAL_CST:
case FIXED_CST:
case COMPLEX_CST:
c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
c->next_interp = c2->cand_num;
}
- else
+ else if (TREE_CODE (rhs2) == INTEGER_CST)
{
/* Record an interpretation for the multiply-immediate. */
c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
add_cand_for_stmt (gs, c2);
}
}
- else
+ else if (TREE_CODE (rhs2) == INTEGER_CST)
{
/* Record an interpretation for the add-immediate. */
widest_int index = wi::to_widest (rhs2);
/* If the size isn't known or is a constant, we have nothing to do. */
size = TYPE_SIZE_UNIT (type);
- if (!size || TREE_CODE (size) == INTEGER_CST)
+ if (!size || poly_int_tree_p (size))
return;
/* Otherwise, make a WITH_SIZE_EXPR. */
DFS_follow_tree_edge (VECTOR_CST_ENCODED_ELT (expr, i));
}
+ if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ DFS_follow_tree_edge (POLY_INT_CST_COEFF (expr, i));
+
if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
{
DFS_follow_tree_edge (TREE_REALPART (expr));
visit (VECTOR_CST_ENCODED_ELT (t, i));
}
+ if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ visit (POLY_INT_CST_COEFF (t, i));
+
if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
{
visit (TREE_REALPART (t));
}
break;
+ case POLY_INT_CST:
+ {
+ char buf[10];
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ {
+ snprintf (buf, sizeof (buf), "elt%u: ", i);
+ print_node (file, buf, POLY_INT_CST_COEFF (node, i),
+ indent + 4);
+ }
+ }
+ break;
+
case IDENTIFIER_NODE:
lang_hooks.print_identifier (file, node, indent);
break;
return rli;
}
+/* Fold sizetype value X to bitsizetype, given that X represents a type
+ size or offset. */
+
+static tree
+bits_from_bytes (tree x)
+{
+ if (POLY_INT_CST_P (x))
+ /* The runtime calculation isn't allowed to overflow sizetype;
+ increasing the runtime values must always increase the size
+ or offset of the object. This means that the object imposes
+ a maximum value on the runtime parameters, but we don't record
+ what that is. */
+ return build_poly_int_cst
+ (bitsizetype,
+ poly_wide_int::from (poly_int_cst_value (x),
+ TYPE_PRECISION (bitsizetype),
+ TYPE_SIGN (TREE_TYPE (x))));
+ x = fold_convert (bitsizetype, x);
+ gcc_checking_assert (x);
+ return x;
+}
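+
+/* Note that the conversion above only changes the precision and type
+   of the value: a sizetype POLY_INT_CST with coefficients [16, 16]
+   becomes a bitsizetype POLY_INT_CST with the same coefficients.
+   Callers such as bit_from_pos below perform the multiplication by
+   BITS_PER_UNIT (or by an element size) themselves.  */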
+
/* Return the combined bit position for the byte offset OFFSET and the
bit position BITPOS.
bit_from_pos (tree offset, tree bitpos)
{
return size_binop (PLUS_EXPR, bitpos,
- size_binop (MULT_EXPR,
- fold_convert (bitsizetype, offset),
+ size_binop (MULT_EXPR, bits_from_bytes (offset),
bitsize_unit_node));
}
TYPE_SIZE_UNIT (type) = int_const_binop (MULT_EXPR,
TYPE_SIZE_UNIT (innertype),
size_int (nunits));
- TYPE_SIZE (type) = int_const_binop (MULT_EXPR,
- TYPE_SIZE (innertype),
- bitsize_int (nunits));
+ TYPE_SIZE (type) = int_const_binop
+ (MULT_EXPR,
+ bits_from_bytes (TYPE_SIZE_UNIT (type)),
+ bitsize_int (BITS_PER_UNIT));
/* For vector types, we do not default to the mode's alignment.
Instead, query a target hook, defaulting to natural alignment.
length = size_zero_node;
TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
- fold_convert (bitsizetype,
- length));
+ bits_from_bytes (length));
/* If we know the size of the element, calculate the total size
directly, rather than do some division thing below. This
error ("invalid first operand of MEM_REF");
return x;
}
- if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
+ if (!poly_int_tree_p (TREE_OPERAND (t, 1))
|| !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
{
error ("invalid offset operand of MEM_REF");
debug_generic_stmt (expr);
return true;
}
- if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
+ if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
|| !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
{
error ("invalid offset operand in MEM_REF");
return true;
}
if (!TMR_OFFSET (expr)
- || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
+ || !poly_int_tree_p (TMR_OFFSET (expr))
|| !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
{
error ("invalid offset operand in TARGET_MEM_REF");
tree GTY ((length ("vector_cst_encoded_nelts ((tree) &%h)"))) elts[1];
};
+struct GTY(()) tree_poly_int_cst {
+ struct tree_typed typed;
+ tree coeffs[NUM_POLY_INT_COEFFS];
+};
+
struct GTY(()) tree_identifier {
struct tree_common common;
struct ht_identifier id;
struct tree_typed GTY ((tag ("TS_TYPED"))) typed;
struct tree_common GTY ((tag ("TS_COMMON"))) common;
struct tree_int_cst GTY ((tag ("TS_INT_CST"))) int_cst;
+ struct tree_poly_int_cst GTY ((tag ("TS_POLY_INT_CST"))) poly_int_cst;
struct tree_real_cst GTY ((tag ("TS_REAL_CST"))) real_cst;
struct tree_fixed_cst GTY ((tag ("TS_FIXED_CST"))) fixed_cst;
struct tree_vector GTY ((tag ("TS_VECTOR"))) vector;
break;
default:
+ if (POLY_INT_CST_P (t1))
+ return compare_sizes_for_sort (wi::to_poly_widest (t1),
+ wi::to_poly_widest (t2));
+
tclass = TREE_CODE_CLASS (code);
/* For decls, compare their UIDs. */
pp_string (pp, "(OVF)");
break;
+ case POLY_INT_CST:
+ pp_string (pp, "POLY_INT_CST [");
+ dump_generic_node (pp, POLY_INT_CST_COEFF (node, 0), spc, flags, false);
+ for (unsigned int i = 1; i < NUM_POLY_INT_COEFFS; ++i)
+ {
+ pp_string (pp, ", ");
+ dump_generic_node (pp, POLY_INT_CST_COEFF (node, i),
+ spc, flags, false);
+ }
+ pp_string (pp, "]");
+ break;
+
case REAL_CST:
/* Code copied from print_node. */
{
if (addr->offset && !integer_zerop (addr->offset))
{
- offset_int dc = offset_int::from (wi::to_wide (addr->offset), SIGNED);
+ poly_offset_int dc
+ = poly_offset_int::from (wi::to_poly_wide (addr->offset), SIGNED);
off = immed_wide_int_const (dc, pointer_mode);
}
else
gcc_unreachable ();
default:
+ if (POLY_INT_CST_P (expr))
+ return NULL_TREE;
return fold_convert (ptr_type_node, expr);
}
}
return res == 0;
default:
+ if (POLY_INT_CST_P (top)
+ && POLY_INT_CST_P (bot)
+ && constant_multiple_p (wi::to_poly_widest (top),
+ wi::to_poly_widest (bot), mul))
+ return true;
+
return false;
}
}
{
STRIP_NOPS (inv_expr);
- if (TREE_CODE (inv_expr) == INTEGER_CST || TREE_CODE (inv_expr) == SSA_NAME)
+ if (poly_int_tree_p (inv_expr)
+ || TREE_CODE (inv_expr) == SSA_NAME)
return NULL;
/* Don't strip constant part away as we used to. */
cand->incremented_at = incremented_at;
data->vcands.safe_push (cand);
- if (TREE_CODE (step) != INTEGER_CST)
+ if (!poly_int_tree_p (step))
{
find_inv_vars (data, &step, &cand->inv_vars);
if (TYPE_PRECISION (utype) < TYPE_PRECISION (ctype))
{
if (cand->orig_iv != NULL && CONVERT_EXPR_P (cbase)
- && (CONVERT_EXPR_P (cstep) || TREE_CODE (cstep) == INTEGER_CST))
+ && (CONVERT_EXPR_P (cstep) || poly_int_tree_p (cstep)))
{
tree inner_base, inner_step, inner_type;
inner_base = TREE_OPERAND (cbase, 0);
if (is_gimple_min_invariant (expr))
{
- if (TREE_CODE (expr) == INTEGER_CST)
+ if (poly_int_tree_p (expr))
return comp_cost (integer_cost [speed], 0);
if (TREE_CODE (expr) == ADDR_EXPR)
case VECTOR_CST:
case COMPLEX_CST:
case INTEGER_CST:
+ case POLY_INT_CST:
case REAL_CST:
case FIXED_CST:
case CONSTRUCTOR:
}
+/* Read all pointer fields in the TS_POLY_INT_CST structure of EXPR from
+ input block IB. DATA_IN contains tables and descriptors for the
+ file being read. */
+
+static void
+lto_input_ts_poly_tree_pointers (struct lto_input_block *ib,
+ struct data_in *data_in, tree expr)
+{
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ POLY_INT_CST_COEFF (expr, i) = stream_read_tree (ib, data_in);
+}
+
+
/* Read all pointer fields in the TS_COMPLEX structure of EXPR from input
block IB. DATA_IN contains tables and descriptors for the
file being read. */
if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
lto_input_ts_vector_tree_pointers (ib, data_in, expr);
+ if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
+ lto_input_ts_poly_tree_pointers (ib, data_in, expr);
+
if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
lto_input_ts_complex_tree_pointers (ib, data_in, expr);
}
+/* Write all pointer fields in the TS_POLY_INT_CST structure of EXPR to
+ output block OB. If REF_P is true, write a reference to EXPR's pointer
+ fields. */
+
+static void
+write_ts_poly_tree_pointers (struct output_block *ob, tree expr, bool ref_p)
+{
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ stream_write_tree (ob, POLY_INT_CST_COEFF (expr, i), ref_p);
+}
+
+
/* Write all pointer fields in the TS_COMPLEX structure of EXPR to output
block OB. If REF_P is true, write a reference to EXPR's pointer
fields. */
if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
write_ts_vector_tree_pointers (ob, expr, ref_p);
+ if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
+ write_ts_poly_tree_pointers (ob, expr, ref_p);
+
if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
write_ts_complex_tree_pointers (ob, expr, ref_p);
handled_p[TS_TYPED] = true;
handled_p[TS_COMMON] = true;
handled_p[TS_INT_CST] = true;
+ handled_p[TS_POLY_INT_CST] = true;
handled_p[TS_REAL_CST] = true;
handled_p[TS_FIXED_CST] = true;
handled_p[TS_VECTOR] = true;
return cmp;
/* Then sort after DR_INIT. In case of identical DRs sort after stmt UID. */
- cmp = tree_int_cst_compare (DR_INIT (dra), DR_INIT (drb));
+ cmp = data_ref_compare_tree (DR_INIT (dra), DR_INIT (drb));
if (cmp == 0)
return gimple_uid (DR_STMT (dra)) < gimple_uid (DR_STMT (drb)) ? -1 : 1;
return cmp;
if (TREE_OVERFLOW (val1) || TREE_OVERFLOW (val2))
return -2;
- return tree_int_cst_compare (val1, val2);
+ if (TREE_CODE (val1) == INTEGER_CST
+ && TREE_CODE (val2) == INTEGER_CST)
+ return tree_int_cst_compare (val1, val2);
+
+ if (poly_int_tree_p (val1) && poly_int_tree_p (val2))
+ {
+ if (known_eq (wi::to_poly_widest (val1),
+ wi::to_poly_widest (val2)))
+ return 0;
+ if (known_lt (wi::to_poly_widest (val1),
+ wi::to_poly_widest (val2)))
+ return -1;
+ if (known_gt (wi::to_poly_widest (val1),
+ wi::to_poly_widest (val2)))
+ return 1;
+ }
+
+ return -2;
}
else
{
static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
+/* Class and variable for making sure that there is a single POLY_INT_CST
+ for a given value. */
+struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
+{
+ typedef std::pair<tree, const poly_wide_int *> compare_type;
+ static hashval_t hash (tree t);
+ static bool equal (tree x, const compare_type &y);
+};
+
+static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
+
/* Hash table for optimization flags and target option flags. Use the same
hash table for both sets of options. Nodes for building the current
optimization and target option nodes. The assumption is most of the time
/* tcc_constant cases. */
case VOID_CST: return TS_TYPED;
case INTEGER_CST: return TS_INT_CST;
+ case POLY_INT_CST: return TS_POLY_INT_CST;
case REAL_CST: return TS_REAL_CST;
case FIXED_CST: return TS_FIXED_CST;
case COMPLEX_CST: return TS_COMPLEX;
case TS_COMMON:
case TS_INT_CST:
+ case TS_POLY_INT_CST:
case TS_REAL_CST:
case TS_FIXED_CST:
case TS_VECTOR:
int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
+ poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
+
int_cst_node = make_int_cst (1, 1);
cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
{
case VOID_CST: return sizeof (tree_typed);
case INTEGER_CST: gcc_unreachable ();
+ case POLY_INT_CST: return sizeof (tree_poly_int_cst);
case REAL_CST: return sizeof (tree_real_cst);
case FIXED_CST: return sizeof (tree_fixed_cst);
case COMPLEX_CST: return sizeof (tree_complex);
return nt;
}
-/* Create an INT_CST node with a LOW value sign extended to TYPE. */
+/* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
+
+static tree
+build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS])
+{
+ size_t length = sizeof (struct tree_poly_int_cst);
+ record_node_allocation_statistics (POLY_INT_CST, length);
+
+ tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
+
+ TREE_SET_CODE (t, POLY_INT_CST);
+ TREE_CONSTANT (t) = 1;
+ TREE_TYPE (t) = type;
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ POLY_INT_CST_COEFF (t, i) = coeffs[i];
+ return t;
+}
+
+/* Create a constant tree that contains CST sign-extended to TYPE. */
tree
-build_int_cst (tree type, HOST_WIDE_INT low)
+build_int_cst (tree type, poly_int64 cst)
{
/* Support legacy code. */
if (!type)
type = integer_type_node;
- return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
+ return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
}
+/* Create a constant tree that contains CST zero-extended to TYPE. */
+
tree
-build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
+build_int_cstu (tree type, poly_uint64 cst)
{
return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
}
-/* Create an INT_CST node with a LOW value sign extended to TYPE. */
+/* Create a constant tree that contains CST sign-extended to TYPE. */
tree
-build_int_cst_type (tree type, HOST_WIDE_INT low)
+build_int_cst_type (tree type, poly_int64 cst)
{
gcc_assert (type);
- return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
+ return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
}
/* Construct a tree of type TYPE with the value given by CST. Signedness
tree
-force_fit_type (tree type, const wide_int_ref &cst,
+force_fit_type (tree type, const poly_wide_int_ref &cst,
int overflowable, bool overflowed)
{
signop sign = TYPE_SIGN (type);
|| overflowable < 0
|| (overflowable > 0 && sign == SIGNED))
{
- wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
- tree t = build_new_int_cst (type, tmp);
+ poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
+ sign);
+ tree t;
+ if (tmp.is_constant ())
+ t = build_new_int_cst (type, tmp.coeffs[0]);
+ else
+ {
+ tree coeffs[NUM_POLY_INT_COEFFS];
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ {
+ coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
+ TREE_OVERFLOW (coeffs[i]) = 1;
+ }
+ t = build_new_poly_int_cst (type, coeffs);
+ }
TREE_OVERFLOW (t) = 1;
return t;
}
the upper bits and ensures that hashing and value equality based
upon the underlying HOST_WIDE_INTs works without masking. */
-tree
-wide_int_to_tree (tree type, const wide_int_ref &pcst)
+static tree
+wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
{
tree t;
int ix = -1;
return t;
}
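+
+/* Return the hash code of T, a POLY_INT_CST.  */
+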
+hashval_t
+poly_int_cst_hasher::hash (tree t)
+{
+ inchash::hash hstate;
+
+ hstate.add_int (TYPE_UID (TREE_TYPE (t)));
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
+
+ return hstate.end ();
+}
+
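+/* Return true if X, a POLY_INT_CST, has the type given by Y.first and
+   the coefficient values given by *Y.second.  */
+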
+bool
+poly_int_cst_hasher::equal (tree x, const compare_type &y)
+{
+ if (TREE_TYPE (x) != y.first)
+ return false;
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
+ return false;
+ return true;
+}
+
+/* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
+ The elements must also have type TYPE. */
+
+tree
+build_poly_int_cst (tree type, const poly_wide_int_ref &values)
+{
+ unsigned int prec = TYPE_PRECISION (type);
+ gcc_assert (prec <= values.coeffs[0].get_precision ());
+ poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
+
+ inchash::hash h;
+ h.add_int (TYPE_UID (type));
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ h.add_wide_int (c.coeffs[i]);
+ poly_int_cst_hasher::compare_type comp (type, &c);
+ tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
+ INSERT);
+ if (*slot == NULL_TREE)
+ {
+ tree coeffs[NUM_POLY_INT_COEFFS];
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
+ *slot = build_new_poly_int_cst (type, coeffs);
+ }
+ return *slot;
+}
+
+/* Create a constant tree with value VALUE in type TYPE. */
+
+tree
+wide_int_to_tree (tree type, const poly_wide_int_ref &value)
+{
+ if (value.is_constant ())
+ return wide_int_to_tree_1 (type, value.coeffs[0]);
+ return build_poly_int_cst (type, value);
+}
+
void
cache_integer_cst (tree t)
{
exp = TREE_OPERAND (exp, 0);
return TREE_CONSTANT (exp);
}
+
+/* Return true if T holds a polynomial pointer difference, storing it in
+ *VALUE if so. A true return means that T's precision is no greater
+ than 64 bits, which is the largest address space we support, so *VALUE
+ never loses precision. However, the signedness of the result does
+ not necessarily match the signedness of T: sometimes an unsigned type
+ like sizetype is used to encode a value that is actually negative. */
+
+bool
+ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
+{
+ if (!t)
+ return false;
+ if (TREE_CODE (t) == INTEGER_CST)
+ {
+ if (!cst_and_fits_in_hwi (t))
+ return false;
+ *value = int_cst_value (t);
+ return true;
+ }
+ if (POLY_INT_CST_P (t))
+ {
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
+ return false;
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
+ return true;
+ }
+ return false;
+}
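+
+/* For example, with NUM_POLY_INT_COEFFS == 2, ptrdiff_tree_p would
+   store the sizetype value 16 + 16x in *VALUE as the poly_int64 with
+   coefficients [16, 16], while a sizetype constant that encodes -4
+   would be stored as -4, since int_cst_value sign-extends it.  */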
+
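+/* T is an INTEGER_CST or POLY_INT_CST whose numerical value (extended
+   according to TYPE_UNSIGNED) fits in a poly_int64.  Return that
+   poly_int64.  */
+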
+poly_int64
+tree_to_poly_int64 (const_tree t)
+{
+ gcc_assert (tree_fits_poly_int64_p (t));
+ if (POLY_INT_CST_P (t))
+ return poly_int_cst_value (t).force_shwi ();
+ return TREE_INT_CST_LOW (t);
+}
+
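+/* T is an INTEGER_CST or POLY_INT_CST whose numerical value (extended
+   according to TYPE_UNSIGNED) fits in a poly_uint64.  Return that
+   poly_uint64.  */
+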
+poly_uint64
+tree_to_poly_uint64 (const_tree t)
+{
+ gcc_assert (tree_fits_poly_uint64_p (t));
+ if (POLY_INT_CST_P (t))
+ return poly_int_cst_value (t).force_uhwi ();
+ return TREE_INT_CST_LOW (t);
+}
\f
/* Return first list element whose TREE_VALUE is ELEM.
Return 0 if ELEM is not in LIST. */
offsetted by OFFSET units. */
tree
-build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
+build_invariant_address (tree type, tree base, poly_int64 offset)
{
tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
build_fold_addr_expr (base),
&& wi::fits_shwi_p (wi::to_widest (t)));
}
+/* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
+ value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
+
+bool
+tree_fits_poly_int64_p (const_tree t)
+{
+ if (t == NULL_TREE)
+ return false;
+ if (POLY_INT_CST_P (t))
+ {
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
+ if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
+ return false;
+ return true;
+ }
+ return (TREE_CODE (t) == INTEGER_CST
+ && wi::fits_shwi_p (wi::to_widest (t)));
+}
+
/* Return true if T is an INTEGER_CST whose numerical value (extended
according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
&& wi::fits_uhwi_p (wi::to_widest (t)));
}
+/* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
+ value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
+
+bool
+tree_fits_poly_uint64_p (const_tree t)
+{
+ if (t == NULL_TREE)
+ return false;
+ if (POLY_INT_CST_P (t))
+ {
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
+ if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
+ return false;
+ return true;
+ }
+ return (TREE_CODE (t) == INTEGER_CST
+ && wi::fits_uhwi_p (wi::to_widest (t)));
+}
+
/* T is an INTEGER_CST whose numerical value (extended according to
TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
HOST_WIDE_INT. */
return 0;
default:
+ if (POLY_INT_CST_P (t1))
+ /* A false return means maybe_ne rather than known_ne. */
+ return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
+ TYPE_SIGN (TREE_TYPE (t1))),
+ poly_widest_int::from (poly_int_cst_value (t2),
+ TYPE_SIGN (TREE_TYPE (t2))));
break;
}
bool
valid_constant_size_p (const_tree size)
{
+ if (POLY_INT_CST_P (size))
+ {
+ if (TREE_OVERFLOW (size))
+ return false;
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
+ return false;
+ return true;
+ }
if (! tree_fits_uhwi_p (size)
|| TREE_OVERFLOW (size)
|| tree_int_cst_sign_bit (size) != 0)
}
/* FALL THROUGH */
default:
+ if (POLY_INT_CST_P (t))
+ {
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
+ return;
+ }
tclass = TREE_CODE_CLASS (code);
if (tclass == tcc_declaration)
sizetype. */
tree
-build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
+build_array_type_nelts (tree elt_type, poly_uint64 nelts)
{
return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
}
gcc_checking_assert (TREE_OVERFLOW (t));
/* For tree codes with a sharing machinery re-build the result. */
- if (TREE_CODE (t) == INTEGER_CST)
- return wide_int_to_tree (TREE_TYPE (t), wi::to_wide (t));
+ if (poly_int_tree_p (t))
+ return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
/* For VECTOR_CST, remove the overflow bits from the encoded elements
and canonicalize the result. */
some circumstances. */
DEFTREECODE (INTEGER_CST, "integer_cst", tcc_constant, 0)
+/* Contents are given by POLY_INT_CST_COEFF. */
+DEFTREECODE (POLY_INT_CST, "poly_int_cst", tcc_constant, 0)
+
/* Contents are in TREE_REAL_CST field. */
DEFTREECODE (REAL_CST, "real_cst", tcc_constant, 0)
#define TREE_INT_CST_LOW(NODE) \
((unsigned HOST_WIDE_INT) TREE_INT_CST_ELT (NODE, 0))
+/* Return true if NODE is a POLY_INT_CST. This is only ever true on
+ targets with variable-sized modes. */
+#define POLY_INT_CST_P(NODE) \
+ (NUM_POLY_INT_COEFFS > 1 && TREE_CODE (NODE) == POLY_INT_CST)
+
+/* In a POLY_INT_CST node. */
+#define POLY_INT_CST_COEFF(NODE, I) \
+ (POLY_INT_CST_CHECK (NODE)->poly_int_cst.coeffs[I])
+
#define TREE_REAL_CST_PTR(NODE) (REAL_CST_CHECK (NODE)->real_cst.real_cst_ptr)
#define TREE_REAL_CST(NODE) (*TREE_REAL_CST_PTR (NODE))
extern tree double_int_to_tree (tree, double_int);
-extern tree wide_int_to_tree (tree type, const wide_int_ref &cst);
-extern tree force_fit_type (tree, const wide_int_ref &, int, bool);
+extern tree wide_int_to_tree (tree type, const poly_wide_int_ref &cst);
+extern tree force_fit_type (tree, const poly_wide_int_ref &, int, bool);
/* Create an INT_CST node with a CST value sign- or zero-extended to TYPE. */
/* static inline */
-extern tree build_int_cst (tree, HOST_WIDE_INT);
-extern tree build_int_cstu (tree type, unsigned HOST_WIDE_INT cst);
-extern tree build_int_cst_type (tree, HOST_WIDE_INT);
+extern tree build_int_cst (tree, poly_int64);
+extern tree build_int_cstu (tree type, poly_uint64);
+extern tree build_int_cst_type (tree, poly_int64);
extern tree make_vector (unsigned, unsigned CXX_MEM_STAT_INFO);
extern tree build_vector_from_ctor (tree, vec<constructor_elt, va_gc> *);
extern tree build_vector_from_val (tree, tree);
extern tree build_all_ones_cst (tree);
extern tree build_zero_cst (tree);
extern tree build_string (int, const char *);
+extern tree build_poly_int_cst (tree, const poly_wide_int_ref &);
extern tree build_tree_list (tree, tree CXX_MEM_STAT_INFO);
extern tree build_tree_list_vec (const vec<tree, va_gc> * CXX_MEM_STAT_INFO);
extern tree build_decl (location_t, enum tree_code,
extern tree build_index_type (tree);
extern tree build_array_type (tree, tree, bool = false);
extern tree build_nonshared_array_type (tree, tree);
-extern tree build_array_type_nelts (tree, unsigned HOST_WIDE_INT);
+extern tree build_array_type_nelts (tree, poly_uint64);
extern tree build_function_type (tree, tree);
extern tree build_function_type_list (tree, ...);
extern tree build_varargs_function_type_list (tree, ...);
extern int tree_int_cst_equal (const_tree, const_tree);
-extern bool tree_fits_shwi_p (const_tree)
- ATTRIBUTE_PURE;
-extern bool tree_fits_uhwi_p (const_tree)
- ATTRIBUTE_PURE;
+extern bool tree_fits_shwi_p (const_tree) ATTRIBUTE_PURE;
+extern bool tree_fits_poly_int64_p (const_tree) ATTRIBUTE_PURE;
+extern bool tree_fits_uhwi_p (const_tree) ATTRIBUTE_PURE;
+extern bool tree_fits_poly_uint64_p (const_tree) ATTRIBUTE_PURE;
extern HOST_WIDE_INT tree_to_shwi (const_tree);
+extern poly_int64 tree_to_poly_int64 (const_tree);
extern unsigned HOST_WIDE_INT tree_to_uhwi (const_tree);
+extern poly_uint64 tree_to_poly_uint64 (const_tree);
#if !defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 4003)
extern inline __attribute__ ((__gnu_inline__)) HOST_WIDE_INT
tree_to_shwi (const_tree t)
gcc_assert (tree_fits_uhwi_p (t));
return TREE_INT_CST_LOW (t);
}
+#if NUM_POLY_INT_COEFFS == 1
+extern inline __attribute__ ((__gnu_inline__)) poly_int64
+tree_to_poly_int64 (const_tree t)
+{
+ gcc_assert (tree_fits_poly_int64_p (t));
+ return TREE_INT_CST_LOW (t);
+}
+
+extern inline __attribute__ ((__gnu_inline__)) poly_uint64
+tree_to_poly_uint64 (const_tree t)
+{
+ gcc_assert (tree_fits_poly_uint64_p (t));
+ return TREE_INT_CST_LOW (t);
+}
+#endif
#endif
extern int tree_int_cst_sgn (const_tree);
extern int tree_int_cst_sign_bit (const_tree);
extern tree excess_precision_type (tree);
extern bool valid_constant_size_p (const_tree);
+/* Return true if T holds a value that can be represented as a poly_int64
+ without loss of precision. Store the value in *VALUE if so. */
+
+inline bool
+poly_int_tree_p (const_tree t, poly_int64_pod *value)
+{
+ if (tree_fits_poly_int64_p (t))
+ {
+ *value = tree_to_poly_int64 (t);
+ return true;
+ }
+ return false;
+}
+
+/* Return true if T holds a value that can be represented as a poly_uint64
+ without loss of precision. Store the value in *VALUE if so. */
+
+inline bool
+poly_int_tree_p (const_tree t, poly_uint64_pod *value)
+{
+ if (tree_fits_poly_uint64_p (t))
+ {
+ *value = tree_to_poly_uint64 (t);
+ return true;
+ }
+ return false;
+}
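+
+/* A typical use of the two overloads above (a sketch): handle sizes
+   that are fixed or linear in a runtime parameter, rejecting anything
+   symbolic:
+
+     poly_int64 size;
+     if (poly_int_tree_p (TYPE_SIZE_UNIT (type), &size))
+       ... SIZE is usable, though possibly only known at runtime ...  */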
/* From expmed.c. Since rtl.h is included after tree.h, we can't
put the prototype here. Rtl.h does declare the prototype if
&& COMPLETE_TYPE_P (TREE_TYPE (type)));
}
+/* Return true if the value of T could be represented as a poly_widest_int. */
+
+inline bool
+poly_int_tree_p (const_tree t)
+{
+ return (TREE_CODE (t) == INTEGER_CST || POLY_INT_CST_P (t));
+}
+
extern tree strip_float_extensions (tree);
extern int really_constant_p (const_tree);
+extern bool ptrdiff_tree_p (const_tree, poly_int64_pod *);
extern bool decl_address_invariant_p (const_tree);
extern bool decl_address_ip_invariant_p (const_tree);
extern bool int_fits_type_p (const_tree, const_tree);
/* The tree and const_tree overload templates. */
namespace wi
{
+ class unextended_tree
+ {
+ private:
+ const_tree m_t;
+
+ public:
+ unextended_tree () {}
+ unextended_tree (const_tree t) : m_t (t) {}
+
+ unsigned int get_precision () const;
+ const HOST_WIDE_INT *get_val () const;
+ unsigned int get_len () const;
+ const_tree get_tree () const { return m_t; }
+ };
+
+ template <>
+ struct int_traits <unextended_tree>
+ {
+ static const enum precision_type precision_type = VAR_PRECISION;
+ static const bool host_dependent_precision = false;
+ static const bool is_sign_extended = false;
+ };
+
template <int N>
class extended_tree
{
const_tree m_t;
public:
+ extended_tree () {}
extended_tree (const_tree);
unsigned int get_precision () const;
const HOST_WIDE_INT *get_val () const;
unsigned int get_len () const;
+ const_tree get_tree () const { return m_t; }
};
template <int N>
static const unsigned int precision = N;
};
- typedef const generic_wide_int <extended_tree <WIDE_INT_MAX_PRECISION> >
- tree_to_widest_ref;
- typedef const generic_wide_int <extended_tree <ADDR_MAX_PRECISION> >
- tree_to_offset_ref;
+ typedef extended_tree <WIDE_INT_MAX_PRECISION> widest_extended_tree;
+ typedef extended_tree <ADDR_MAX_PRECISION> offset_extended_tree;
+
+ typedef const generic_wide_int <widest_extended_tree> tree_to_widest_ref;
+ typedef const generic_wide_int <offset_extended_tree> tree_to_offset_ref;
typedef const generic_wide_int<wide_int_ref_storage<false, false> >
tree_to_wide_ref;
tree_to_offset_ref to_offset (const_tree);
tree_to_wide_ref to_wide (const_tree);
wide_int to_wide (const_tree, unsigned int);
+
+ typedef const poly_int <NUM_POLY_INT_COEFFS,
+ generic_wide_int <widest_extended_tree> >
+ tree_to_poly_widest_ref;
+ typedef const poly_int <NUM_POLY_INT_COEFFS,
+ generic_wide_int <offset_extended_tree> >
+ tree_to_poly_offset_ref;
+ typedef const poly_int <NUM_POLY_INT_COEFFS,
+ generic_wide_int <unextended_tree> >
+ tree_to_poly_wide_ref;
+
+ tree_to_poly_widest_ref to_poly_widest (const_tree);
+ tree_to_poly_offset_ref to_poly_offset (const_tree);
+ tree_to_poly_wide_ref to_poly_wide (const_tree);
+
+ template <int N>
+ struct ints_for <generic_wide_int <extended_tree <N> >, CONST_PRECISION>
+ {
+ typedef generic_wide_int <extended_tree <N> > extended;
+ static extended zero (const extended &);
+ };
+
+ template <>
+ struct ints_for <generic_wide_int <unextended_tree>, VAR_PRECISION>
+ {
+ typedef generic_wide_int <unextended_tree> unextended;
+ static unextended zero (const unextended &);
+ };
}
/* Refer to INTEGER_CST T as though it were a widest_int.
gcc_unreachable ();
}
+inline unsigned int
+wi::unextended_tree::get_precision () const
+{
+ return TYPE_PRECISION (TREE_TYPE (m_t));
+}
+
+inline const HOST_WIDE_INT *
+wi::unextended_tree::get_val () const
+{
+ return &TREE_INT_CST_ELT (m_t, 0);
+}
+
+inline unsigned int
+wi::unextended_tree::get_len () const
+{
+ return TREE_INT_CST_NUNITS (m_t);
+}
+
+/* Return the value of a POLY_INT_CST in its native precision. */
+
+inline wi::tree_to_poly_wide_ref
+poly_int_cst_value (const_tree x)
+{
+ poly_int <NUM_POLY_INT_COEFFS, generic_wide_int <wi::unextended_tree> > res;
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ res.coeffs[i] = POLY_INT_CST_COEFF (x, i);
+ return res;
+}
+
+/* Access INTEGER_CST or POLY_INT_CST tree T as if it were a
+ poly_widest_int. See wi::to_widest for more details. */
+
+inline wi::tree_to_poly_widest_ref
+wi::to_poly_widest (const_tree t)
+{
+ if (POLY_INT_CST_P (t))
+ {
+ poly_int <NUM_POLY_INT_COEFFS,
+ generic_wide_int <widest_extended_tree> > res;
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ res.coeffs[i] = POLY_INT_CST_COEFF (t, i);
+ return res;
+ }
+ return t;
+}
+
+/* Access INTEGER_CST or POLY_INT_CST tree T as if it were a
+ poly_offset_int. See wi::to_offset for more details. */
+
+inline wi::tree_to_poly_offset_ref
+wi::to_poly_offset (const_tree t)
+{
+ if (POLY_INT_CST_P (t))
+ {
+ poly_int <NUM_POLY_INT_COEFFS,
+ generic_wide_int <offset_extended_tree> > res;
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ res.coeffs[i] = POLY_INT_CST_COEFF (t, i);
+ return res;
+ }
+ return t;
+}
+
+/* Access INTEGER_CST or POLY_INT_CST tree T as if it were a
+ poly_wide_int. See wi::to_wide for more details. */
+
+inline wi::tree_to_poly_wide_ref
+wi::to_poly_wide (const_tree t)
+{
+ if (POLY_INT_CST_P (t))
+ return poly_int_cst_value (t);
+ return t;
+}
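+
+/* For example, two type sizes that might be polynomial can be compared
+   with (a sketch):
+
+     known_le (wi::to_poly_offset (TYPE_SIZE_UNIT (t1)),
+               wi::to_poly_offset (TYPE_SIZE_UNIT (t2)))
+
+   which holds only if the comparison is true for all runtime values of
+   the target parameters.  */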
+
+template <int N>
+inline generic_wide_int <wi::extended_tree <N> >
+wi::ints_for <generic_wide_int <wi::extended_tree <N> >,
+ wi::CONST_PRECISION>::zero (const extended &x)
+{
+ return build_zero_cst (TREE_TYPE (x.get_tree ()));
+}
+
+inline generic_wide_int <wi::unextended_tree>
+wi::ints_for <generic_wide_int <wi::unextended_tree>,
+ wi::VAR_PRECISION>::zero (const unextended &x)
+{
+ return build_zero_cst (TREE_TYPE (x.get_tree ()));
+}
+
namespace wi
{
template <typename T>
bool
wi::fits_to_boolean_p (const T &x, const_tree type)
{
- return eq_p (x, 0) || eq_p (x, TYPE_UNSIGNED (type) ? 1 : -1);
+ typedef typename poly_int_traits<T>::int_type int_type;
+ return (known_eq (x, int_type (0))
+ || known_eq (x, int_type (TYPE_UNSIGNED (type) ? 1 : -1)));
}
template <typename T>
return fits_to_boolean_p (x, type);
if (TYPE_UNSIGNED (type))
- return eq_p (x, zext (x, TYPE_PRECISION (type)));
+ return known_eq (x, zext (x, TYPE_PRECISION (type)));
else
- return eq_p (x, sext (x, TYPE_PRECISION (type)));
+ return known_eq (x, sext (x, TYPE_PRECISION (type)));
}
/* Produce the smallest number that is represented in TYPE. The precision
DEFTREESTRUCT(TS_TYPED, "typed")
DEFTREESTRUCT(TS_COMMON, "common")
DEFTREESTRUCT(TS_INT_CST, "integer cst")
+DEFTREESTRUCT(TS_POLY_INT_CST, "poly_int_cst")
DEFTREESTRUCT(TS_REAL_CST, "real cst")
DEFTREESTRUCT(TS_FIXED_CST, "fixed cst")
DEFTREESTRUCT(TS_VECTOR, "vector")