+2004-09-27 Mark Mitchell <mark@codesourcery.com>
+
+ PR c++/17642
+ * stor-layout.c (layout_decl): Use fold_convert, not convert.
+ (bit_from_pos): Likewise.
+ (byte_from_pos): Likewise.
+ (pos_from_bit): Likewise.
+ (normalize_offset): Likewise.
+ (place_field): Likewise.
+ (finalize_type_size): Likewise.
+ (layout_type): Likewise.
+ * tree.c (build_index_type): Likewise.
+
2004-09-27 Devang Patel <dpatel@apple.com>
* expr.c (expand_expr_real_1): Handle VEC_COND_EXPR.
+2004-09-27 Mark Mitchell <mark@codesourcery.com>
+
+ PR c++/17642
+ * cp-tree.h (fold_if_not_in_template): New function.
+ * call.c (build_conditional_expr): Use fold_if_not_in_template.
+ (build_cxx_call): Likewise.
+ * cvt.c (convert_to_complex): Likewise.
+ (ocp_convert): Likewise.
+ (convert): Likewise.
+ (convert_force): Likewise.
+ * decl.c (compute_array_index_type): Clear
+ processing_template_decl while folding array bounds.
+ * pt.c (convert_nontype_argument): Clear
+ processing_template_decl while processing non-type argument
+ initialization.
+ * tree.c (fold_if_not_in_template): New function.
+ * typeck.c (build_class_member_access_expr): Use
+ fold_if_not_in_template.
+ (build_array_ref): Likewise.
+ (build_binary_op): Likewise. Do not try to optimize computations
+ when processing templates.
+ (cp_pointer_int_sum): Use fold_if_not_in_template.
+ (pointer_diff): Likewise.
+ (build_unary_op): Likewise.
+ (build_reinterpret_cast): Likewise.
+ (get_delta_difference): Likewise.
+ (expand_ptrmemfunc_cst): Likewise.
+ (dubious_conversion_warnings): Likewise.
+
2004-09-27 Matt Austern <austern@apple.com>
* cp/parser.c (struct cp_token): New one-bit field, implicit_extern_c
}
valid_operands:
- result = fold (build3 (COND_EXPR, result_type, arg1, arg2, arg3));
+ result = fold_if_not_in_template (build3 (COND_EXPR, result_type, arg1,
+ arg2, arg3));
/* We can't use result_type below, as fold might have returned a
throw_expr. */
/* Some built-in function calls will be evaluated at compile-time in
fold (). */
- fn = fold (fn);
+ fn = fold_if_not_in_template (fn);
if (VOID_TYPE_P (TREE_TYPE (fn)))
return fn;
extern int cp_is_overload_p (tree);
extern int cp_auto_var_in_fn_p (tree,tree);
extern void cp_update_decl_after_saving (tree, void *);
+extern tree fold_if_not_in_template (tree);
/* in typeck.c */
extern int string_conv_p (tree, tree, int);
/* For complex data types, we need to perform componentwise
conversion. */
else if (TREE_CODE (type) == COMPLEX_TYPE)
- return fold (convert_to_complex (type, e));
+ return fold_if_not_in_template (convert_to_complex (type, e));
else if (TREE_CODE (e) == TARGET_EXPR)
{
/* Don't build a NOP_EXPR of class type. Instead, change the
/* We shouldn't be treating objects of ADDRESSABLE type as
rvalues. */
gcc_assert (!TREE_ADDRESSABLE (type));
- return fold (build1 (NOP_EXPR, type, e));
+ return fold_if_not_in_template (build_nop (type, e));
}
}
if (code == BOOLEAN_TYPE)
return cp_truthvalue_conversion (e);
- return fold (convert_to_integer (type, e));
+ return fold_if_not_in_template (convert_to_integer (type, e));
}
if (POINTER_TYPE_P (type) || TYPE_PTR_TO_MEMBER_P (type))
- return fold (cp_convert_to_pointer (type, e, false));
+ return fold_if_not_in_template (cp_convert_to_pointer (type, e, false));
if (code == VECTOR_TYPE)
{
tree in_vtype = TREE_TYPE (e);
error ("`%#T' used where a `%T' was expected", in_vtype, type);
return error_mark_node;
}
- return fold (convert_to_vector (type, e));
+ return fold_if_not_in_template (convert_to_vector (type, e));
}
if (code == REAL_TYPE || code == COMPLEX_TYPE)
{
TREE_TYPE (e));
}
if (code == REAL_TYPE)
- return fold (convert_to_real (type, e));
+ return fold_if_not_in_template (convert_to_real (type, e));
else if (code == COMPLEX_TYPE)
- return fold (convert_to_complex (type, e));
+ return fold_if_not_in_template (convert_to_complex (type, e));
}
/* New C++ semantics: since assignment is now based on
if (POINTER_TYPE_P (type) && POINTER_TYPE_P (intype))
{
expr = decl_constant_value (expr);
- return fold (build1 (NOP_EXPR, type, expr));
+ return fold_if_not_in_template (build_nop (type, expr));
}
return ocp_convert (type, expr, CONV_OLD_CONVERT,
enum tree_code code = TREE_CODE (type);
if (code == REFERENCE_TYPE)
- return fold (convert_to_reference (type, e, CONV_C_CAST, LOOKUP_COMPLAIN,
- NULL_TREE));
+ return (fold_if_not_in_template
+ (convert_to_reference (type, e, CONV_C_CAST, LOOKUP_COMPLAIN,
+ NULL_TREE)));
else if (TREE_CODE (TREE_TYPE (e)) == REFERENCE_TYPE)
e = convert_from_reference (e);
if (code == POINTER_TYPE)
- return fold (convert_to_pointer_force (type, e));
+ return fold_if_not_in_template (convert_to_pointer_force (type, e));
/* From typeck.c convert_for_assignment */
if (((TREE_CODE (TREE_TYPE (e)) == POINTER_TYPE && TREE_CODE (e) == ADDR_EXPR
itype = build_min (MINUS_EXPR, sizetype, size, integer_one_node);
else
{
+ HOST_WIDE_INT saved_processing_template_decl;
+
/* Compute the index of the largest element in the array. It is
- one less than the number of elements in the array. */
- itype
- = fold (cp_build_binary_op (MINUS_EXPR,
- cp_convert (ssizetype, size),
- cp_convert (ssizetype, integer_one_node)));
+ one less than the number of elements in the array. We save
+ and restore PROCESSING_TEMPLATE_DECL so that computations in
+ cp_build_binary_op will be appropriately folded. */
+ saved_processing_template_decl = processing_template_decl;
+ processing_template_decl = 0;
+ itype = cp_build_binary_op (MINUS_EXPR,
+ cp_convert (ssizetype, size),
+ cp_convert (ssizetype, integer_one_node));
+ itype = fold (itype);
+ processing_template_decl = saved_processing_template_decl;
+
if (!TREE_CONSTANT (itype))
/* A variable sized array. */
itype = variable_size (itype);
switch (TREE_CODE (type))
{
+ HOST_WIDE_INT saved_processing_template_decl;
+
case INTEGER_TYPE:
case BOOLEAN_TYPE:
case ENUMERAL_TYPE:
return error_mark_node;
/* It's safe to call digest_init in this case; we know we're
- just converting one integral constant expression to another. */
+ just converting one integral constant expression to another.
+ */
+ saved_processing_template_decl = processing_template_decl;
+ processing_template_decl = 0;
expr = digest_init (type, expr, (tree*) 0);
+ processing_template_decl = saved_processing_template_decl;
if (TREE_CODE (expr) != INTEGER_CST)
/* Curiously, some TREE_CONSTANT integral expressions do not
return true;
}
+/* Like "fold", but should be used whenever we might be processing the
+ body of a template. */
+
+tree
+fold_if_not_in_template (tree expr)
+{
+ /* In the body of a template, there is never any need to call
+ "fold". We will call fold later when actually instantiating the
+ template. Integral constant expressions in templates will be
+ evaluated via fold_non_dependent_expr, as necessary. */
+ return (processing_template_decl ? expr : fold (expr));
+}
+
\f
#if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
/* Complain that some language-specific thing hanging off a tree
member_type = cp_build_qualified_type (member_type, type_quals);
}
- result = fold (build3 (COMPONENT_REF, member_type, object, member,
- NULL_TREE));
+ result = build3 (COMPONENT_REF, member_type, object, member,
+ NULL_TREE);
+ result = fold_if_not_in_template (result);
/* Mark the expression const or volatile, as appropriate. Even
though we've dealt with the type above, we still have to mark the
|= (CP_TYPE_VOLATILE_P (type) | TREE_SIDE_EFFECTS (array));
TREE_THIS_VOLATILE (rval)
|= (CP_TYPE_VOLATILE_P (type) | TREE_THIS_VOLATILE (array));
- return require_complete_type (fold (rval));
+ return require_complete_type (fold_if_not_in_template (rval));
}
{
convert it to this type. */
tree final_type = 0;
+ tree result;
+
/* Nonzero if this is an operation like MIN or MAX which can
safely be computed in short if both args are promoted shorts.
Also implies COMMON.
/* Nonzero means set RESULT_TYPE to the common type of the args. */
int common = 0;
+ /* True if both operands have arithmetic type. */
+ bool arithmetic_types_p;
+
/* Apply default conversions. */
op0 = orig_op0;
op1 = orig_op1;
break;
}
- if ((code0 == INTEGER_TYPE || code0 == REAL_TYPE || code0 == COMPLEX_TYPE)
- &&
- (code1 == INTEGER_TYPE || code1 == REAL_TYPE || code1 == COMPLEX_TYPE))
+ arithmetic_types_p =
+ ((code0 == INTEGER_TYPE || code0 == REAL_TYPE || code0 == COMPLEX_TYPE)
+ && (code1 == INTEGER_TYPE || code1 == REAL_TYPE
+ || code1 == COMPLEX_TYPE));
+ /* Determine the RESULT_TYPE, if it is not already known. */
+ if (!result_type
+ && arithmetic_types_p
+ && (shorten || common || short_compare))
+ result_type = common_type (type0, type1);
+
+ if (!result_type)
{
- int none_complex = (code0 != COMPLEX_TYPE && code1 != COMPLEX_TYPE);
+ error ("invalid operands of types `%T' and `%T' to binary `%O'",
+ TREE_TYPE (orig_op0), TREE_TYPE (orig_op1), code);
+ return error_mark_node;
+ }
- if (shorten || common || short_compare)
- result_type = common_type (type0, type1);
+ /* If we're in a template, the only thing we need to know is the
+ RESULT_TYPE. */
+ if (processing_template_decl)
+ return build2 (resultcode, result_type, op0, op1);
+
+ if (arithmetic_types_p)
+ {
+ int none_complex = (code0 != COMPLEX_TYPE && code1 != COMPLEX_TYPE);
/* For certain operations (which identify themselves by shorten != 0)
if both args were extended from the same smaller type,
}
}
- /* At this point, RESULT_TYPE must be nonzero to avoid an error message.
- If CONVERTED is zero, both args will be converted to type RESULT_TYPE.
+ /* If CONVERTED is zero, both args will be converted to type RESULT_TYPE.
Then the expression will be built.
It will be given type FINAL_TYPE if that is nonzero;
otherwise, it will be given type RESULT_TYPE. */
- if (!result_type)
- {
- error ("invalid operands of types `%T' and `%T' to binary `%O'",
- TREE_TYPE (orig_op0), TREE_TYPE (orig_op1), code);
- return error_mark_node;
- }
-
/* Issue warnings about peculiar, but valid, uses of NULL. */
if (/* It's reasonable to use pointer values as operands of &&
and ||, so NULL is no exception. */
if (build_type == NULL_TREE)
build_type = result_type;
- {
- tree result = fold (build2 (resultcode, build_type, op0, op1));
- if (final_type != 0)
- result = cp_convert (final_type, result);
- return result;
- }
+ result = build2 (resultcode, build_type, op0, op1);
+ result = fold_if_not_in_template (result);
+ if (final_type != 0)
+ result = cp_convert (final_type, result);
+ return result;
}
\f
/* Return a tree for the sum or difference (RESULTCODE says which)
pointer_int_sum() anyway. */
complete_type (TREE_TYPE (res_type));
- return pointer_int_sum (resultcode, ptrop, fold (intop));
+ return pointer_int_sum (resultcode, ptrop,
+ fold_if_not_in_template (intop));
}
/* Return a tree for the difference of pointers OP0 and OP1.
/* Do the division. */
result = build2 (EXACT_DIV_EXPR, restype, op0, cp_convert (restype, op1));
- return fold (result);
+ return fold_if_not_in_template (result);
}
\f
/* Construct and perhaps optimize a tree representation
if (TREE_CODE (arg) == COMPLEX_CST)
return TREE_REALPART (arg);
else if (TREE_CODE (TREE_TYPE (arg)) == COMPLEX_TYPE)
- return fold (build1 (REALPART_EXPR, TREE_TYPE (TREE_TYPE (arg)), arg));
+ {
+ arg = build1 (REALPART_EXPR, TREE_TYPE (TREE_TYPE (arg)), arg);
+ return fold_if_not_in_template (arg);
+ }
else
return arg;
if (TREE_CODE (arg) == COMPLEX_CST)
return TREE_IMAGPART (arg);
else if (TREE_CODE (TREE_TYPE (arg)) == COMPLEX_TYPE)
- return fold (build1 (IMAGPART_EXPR, TREE_TYPE (TREE_TYPE (arg)), arg));
+ {
+ arg = build1 (IMAGPART_EXPR, TREE_TYPE (TREE_TYPE (arg)), arg);
+ return fold_if_not_in_template (arg);
+ }
else
return cp_convert (TREE_TYPE (arg), integer_zero_node);
{
if (argtype == 0)
argtype = TREE_TYPE (arg);
- return fold (build1 (code, argtype, arg));
+ return fold_if_not_in_template (build1 (code, argtype, arg));
}
error ("%s", errstring);
|| (TYPE_PTRMEMFUNC_P (type) && TYPE_PTRMEMFUNC_P (intype)))
{
expr = decl_constant_value (expr);
- return fold (build1 (NOP_EXPR, type, expr));
+ return fold_if_not_in_template (build_nop (type, expr));
}
else if ((TYPE_PTRMEM_P (type) && TYPE_PTRMEM_P (intype))
|| (TYPE_PTROBV_P (type) && TYPE_PTROBV_P (intype)))
{
check_for_casting_away_constness (intype, type, "reinterpret_cast");
expr = decl_constant_value (expr);
- return fold (build1 (NOP_EXPR, type, expr));
+ return fold_if_not_in_template (build_nop (type, expr));
}
else if ((TYPE_PTRFN_P (type) && TYPE_PTROBV_P (intype))
|| (TYPE_PTRFN_P (intype) && TYPE_PTROBV_P (type)))
{
pedwarn ("ISO C++ forbids casting between pointer-to-function and pointer-to-object");
expr = decl_constant_value (expr);
- return fold (build1 (NOP_EXPR, type, expr));
+ return fold_if_not_in_template (build_nop (type, expr));
}
else
{
}
}
- return fold (convert_to_integer (ptrdiff_type_node, result));
+ return fold_if_not_in_template (convert_to_integer (ptrdiff_type_node,
+ result));
}
/* Return a constructor for the pointer-to-member-function TYPE using
fn; the call will do the opposite adjustment. */
tree orig_class = DECL_CONTEXT (fn);
tree binfo = binfo_or_else (orig_class, fn_class);
- *delta = fold (build2 (PLUS_EXPR, TREE_TYPE (*delta),
- *delta, BINFO_OFFSET (binfo)));
+ *delta = build2 (PLUS_EXPR, TREE_TYPE (*delta),
+ *delta, BINFO_OFFSET (binfo));
+ *delta = fold_if_not_in_template (*delta);
/* We set PFN to the vtable offset at which the function can be
found, plus one (unless ptrmemfunc_vbit_in_delta, in which
case delta is shifted left, and then incremented). */
*pfn = DECL_VINDEX (fn);
- *pfn = fold (build2 (MULT_EXPR, integer_type_node, *pfn,
- TYPE_SIZE_UNIT (vtable_entry_type)));
+ *pfn = build2 (MULT_EXPR, integer_type_node, *pfn,
+ TYPE_SIZE_UNIT (vtable_entry_type));
+ *pfn = fold_if_not_in_template (*pfn);
switch (TARGET_PTRMEMFUNC_VBIT_LOCATION)
{
case ptrmemfunc_vbit_in_pfn:
- *pfn = fold (build2 (PLUS_EXPR, integer_type_node, *pfn,
- integer_one_node));
+ *pfn = build2 (PLUS_EXPR, integer_type_node, *pfn,
+ integer_one_node);
+ *pfn = fold_if_not_in_template (*pfn);
break;
case ptrmemfunc_vbit_in_delta:
- *delta = fold (build2 (LSHIFT_EXPR, TREE_TYPE (*delta),
- *delta, integer_one_node));
- *delta = fold (build2 (PLUS_EXPR, TREE_TYPE (*delta),
- *delta, integer_one_node));
+ *delta = build2 (LSHIFT_EXPR, TREE_TYPE (*delta),
+ *delta, integer_one_node);
+ *delta = fold_if_not_in_template (*delta);
+ *delta = build2 (PLUS_EXPR, TREE_TYPE (*delta),
+ *delta, integer_one_node);
+ *delta = fold_if_not_in_template (*delta);
break;
default:
gcc_unreachable ();
}
- *pfn = fold (build1 (NOP_EXPR, TYPE_PTRMEMFUNC_FN_TYPE (type),
- *pfn));
+ *pfn = build_nop (TYPE_PTRMEMFUNC_FN_TYPE (type), *pfn);
+ *pfn = fold_if_not_in_template (*pfn);
}
}
overflow_warning (expr);
if (TREE_CONSTANT (expr))
- expr = fold (expr);
+ expr = fold_if_not_in_template (expr);
}
return expr;
}
}
else if (DECL_SIZE_UNIT (decl) == 0)
DECL_SIZE_UNIT (decl)
- = convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
- bitsize_unit_node));
+ = fold_convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
+ bitsize_unit_node));
if (code != FIELD_DECL)
/* For non-fields, update the alignment from the type. */
bit_from_pos (tree offset, tree bitpos)
{
return size_binop (PLUS_EXPR, bitpos,
- size_binop (MULT_EXPR, convert (bitsizetype, offset),
+ size_binop (MULT_EXPR,
+ fold_convert (bitsizetype, offset),
bitsize_unit_node));
}
byte_from_pos (tree offset, tree bitpos)
{
return size_binop (PLUS_EXPR, offset,
- convert (sizetype,
- size_binop (TRUNC_DIV_EXPR, bitpos,
- bitsize_unit_node)));
+ fold_convert (sizetype,
+ size_binop (TRUNC_DIV_EXPR, bitpos,
+ bitsize_unit_node)));
}
void
tree pos)
{
*poffset = size_binop (MULT_EXPR,
- convert (sizetype,
- size_binop (FLOOR_DIV_EXPR, pos,
- bitsize_int (off_align))),
+ fold_convert (sizetype,
+ size_binop (FLOOR_DIV_EXPR, pos,
+ bitsize_int (off_align))),
size_int (off_align / BITS_PER_UNIT));
*pbitpos = size_binop (FLOOR_MOD_EXPR, pos, bitsize_int (off_align));
}
*poffset
= size_binop (PLUS_EXPR, *poffset,
- size_binop (MULT_EXPR, convert (sizetype, extra_aligns),
+ size_binop (MULT_EXPR,
+ fold_convert (sizetype, extra_aligns),
size_int (off_align / BITS_PER_UNIT)));
*pbitpos
/* First adjust OFFSET by the partial bits, then align. */
rli->offset
= size_binop (PLUS_EXPR, rli->offset,
- convert (sizetype,
- size_binop (CEIL_DIV_EXPR, rli->bitpos,
- bitsize_unit_node)));
+ fold_convert (sizetype,
+ size_binop (CEIL_DIV_EXPR, rli->bitpos,
+ bitsize_unit_node)));
rli->bitpos = bitsize_zero_node;
rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
{
rli->offset
= size_binop (PLUS_EXPR, rli->offset,
- convert (sizetype,
- size_binop (CEIL_DIV_EXPR, rli->bitpos,
- bitsize_unit_node)));
+ fold_convert (sizetype,
+ size_binop (CEIL_DIV_EXPR, rli->bitpos,
+ bitsize_unit_node)));
rli->offset
= size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
rli->bitpos = bitsize_zero_node;
result will fit in sizetype. We will get more efficient code using
sizetype, so we force a conversion. */
TYPE_SIZE_UNIT (type)
- = convert (sizetype,
- size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
- bitsize_unit_node));
+ = fold_convert (sizetype,
+ size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
+ bitsize_unit_node));
if (TYPE_SIZE (type) != 0)
{
/* The initial subtraction should happen in the original type so
that (possible) negative values are handled appropriately. */
length = size_binop (PLUS_EXPR, size_one_node,
- convert (sizetype,
- fold (build2 (MINUS_EXPR,
- TREE_TYPE (lb),
- ub, lb))));
+ fold_convert (sizetype,
+ fold (build2 (MINUS_EXPR,
+ TREE_TYPE (lb),
+ ub, lb))));
/* Special handling for arrays of bits (for Chill). */
element_size = TYPE_SIZE (element);
length = size_binop (MAX_EXPR, length, size_zero_node);
TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
- convert (bitsizetype, length));
+ fold_convert (bitsizetype,
+ length));
/* If we know the size of the element, calculate the total
size directly, rather than do some division thing below.
PR fortran/16938
* gfortran.dg/pr16938.f90: New test.
+2004-09-27 Mark Mitchell <mark@codesourcery.com>
+
+ PR c++/17642
+ * g++.dg/template/crash23.C: New test.
+
2004-09-27 Mark Mitchell <mark@codesourcery.com>
PR c++/17585
--- /dev/null
+// PR c++/17642
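+// Folding the non-dependent arithmetic in the loop body while this
+// template was still being processed used to crash the compiler; with
+// fold_if_not_in_template, folding is deferred until instantiation.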
+
+template<int dim>
+int f(const int* const lsh, const int* const bbox, const int* const nghostzones, int d)
+{
+ for (int d=0; d<dim; ++d)
+ lsh[d] - (bbox[2*d+1] ? 0 : nghostzones[d]);
+}
+
TREE_TYPE (itype) = sizetype;
TYPE_PRECISION (itype) = TYPE_PRECISION (sizetype);
TYPE_MIN_VALUE (itype) = size_zero_node;
- TYPE_MAX_VALUE (itype) = convert (sizetype, maxval);
+ TYPE_MAX_VALUE (itype) = fold_convert (sizetype, maxval);
TYPE_MODE (itype) = TYPE_MODE (sizetype);
TYPE_SIZE (itype) = TYPE_SIZE (sizetype);
TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (sizetype);