/* Handle initialization things in C++.
- Copyright (C) 1987-2017 Free Software Foundation, Inc.
+ Copyright (C) 1987-2020 Free Software Foundation, Inc.
Contributed by Michael Tiemann (tiemann@cygnus.com)
This file is part of GCC.
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
+#include "stor-layout.h"
static bool begin_init_stmts (tree *, tree *);
static tree finish_init_stmts (bool, tree, tree);
-- if T is a scalar type, the storage is set to the value of zero
converted to T.
- -- if T is a non-union class type, the storage for each nonstatic
+ -- if T is a non-union class type, the storage for each non-static
data member and each base-class subobject is zero-initialized.
-- if T is a union type, the storage for its first data member is
;
else if (TYPE_PTR_OR_PTRMEM_P (type))
init = fold (convert (type, nullptr_node));
+ else if (NULLPTR_TYPE_P (type))
+ init = build_int_cst (type, 0);
else if (SCALAR_TYPE_P (type))
init = fold (convert (type, integer_zero_node));
else if (RECORD_OR_UNION_CODE_P (TREE_CODE (type)))
else if (VECTOR_TYPE_P (type))
init = build_zero_cst (type);
else
- gcc_assert (TREE_CODE (type) == REFERENCE_TYPE);
+ {
+ gcc_assert (TYPE_REF_P (type));
+ init = build_zero_cst (type);
+ }
/* In all cases, the initializer is a constant. */
if (init)
gcc_assert (!processing_template_decl
|| (SCALAR_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE));
- if (CLASS_TYPE_P (type)
- && type_build_ctor_call (type))
+ if (CLASS_TYPE_P (type) && type_build_ctor_call (type))
{
- tree ctor =
- build_special_member_call (NULL_TREE, complete_ctor_identifier,
- NULL, type, LOOKUP_NORMAL,
- complain);
- if (ctor == error_mark_node)
+ tree ctor
+ = build_special_member_call (NULL_TREE, complete_ctor_identifier,
+ NULL, type, LOOKUP_NORMAL, complain);
+ if (ctor == error_mark_node || TREE_CONSTANT (ctor))
return ctor;
tree fn = NULL_TREE;
if (TREE_CODE (ctor) == CALL_EXPR)
if (ftype == error_mark_node)
continue;
+ /* Ignore flexible array members for value initialization. */
+ if (TREE_CODE (ftype) == ARRAY_TYPE
+ && !COMPLETE_TYPE_P (ftype)
+ && !TYPE_DOMAIN (ftype)
+ && COMPLETE_TYPE_P (TREE_TYPE (ftype))
+ && (next_initializable_field (DECL_CHAIN (field))
+ == NULL_TREE))
+ continue;
+
/* We could skip vfields and fields of types with
user-defined constructors, but I think that won't improve
performance at all; it should be simpler in general just
error ("value-initialization of function type %qT", type);
return error_mark_node;
}
- else if (TREE_CODE (type) == REFERENCE_TYPE)
+ else if (TYPE_REF_P (type))
{
if (complain & tf_error)
error ("value-initialization of reference type %qT", type);
tf_warning_or_error));
if (type_build_dtor_call (type))
{
- tree expr = build_delete (type, decl, sfk_complete_destructor,
+ tree expr = build_delete (input_location,
+ type, decl, sfk_complete_destructor,
LOOKUP_NORMAL
|LOOKUP_NONVIRTUAL
|LOOKUP_DESTRUCTOR,
/* Return the non-static data initializer for FIELD_DECL MEMBER. */
-static GTY((cache)) tree_cache_map *nsdmi_inst;
+static GTY((cache)) decl_tree_cache_map *nsdmi_inst;
tree
get_nsdmi (tree member, bool in_ctor, tsubst_flags_t complain)
{
init = DECL_INITIAL (DECL_TI_TEMPLATE (member));
location_t expr_loc
- = EXPR_LOC_OR_LOC (init, DECL_SOURCE_LOCATION (member));
- tree *slot;
- if (TREE_CODE (init) == DEFAULT_ARG)
+ = cp_expr_loc_or_loc (init, DECL_SOURCE_LOCATION (member));
+ if (TREE_CODE (init) == DEFERRED_PARSE)
/* Unparsed. */;
- else if (nsdmi_inst && (slot = nsdmi_inst->get (member)))
+ else if (tree *slot = hash_map_safe_get (nsdmi_inst, member))
init = *slot;
/* Check recursive instantiation. */
else if (DECL_INSTANTIATING_NSDMI_P (member))
}
else
{
- int un = cp_unevaluated_operand;
- cp_unevaluated_operand = 0;
+ cp_evaluated ev;
location_t sloc = input_location;
input_location = expr_loc;
DECL_INSTANTIATING_NSDMI_P (member) = 1;
- inject_this_parameter (DECL_CONTEXT (member), TYPE_UNQUALIFIED);
+ bool pushed = false;
+ tree ctx = DECL_CONTEXT (member);
+ if (!currently_open_class (ctx)
+ && !LOCAL_CLASS_P (ctx))
+ {
+ push_to_top_level ();
+ push_nested_class (ctx);
+ pushed = true;
+ }
+
+ gcc_checking_assert (!processing_template_decl);
+
+ inject_this_parameter (ctx, TYPE_UNQUALIFIED);
start_lambda_scope (member);
DECL_INSTANTIATING_NSDMI_P (member) = 0;
if (init != error_mark_node)
+ hash_map_safe_put<hm_ggc> (nsdmi_inst, member, init);
+
+ if (pushed)
{
- if (!nsdmi_inst)
- nsdmi_inst = tree_cache_map::create_ggc (37);
- nsdmi_inst->put (member, init);
+ pop_nested_class ();
+ pop_from_top_level ();
}
input_location = sloc;
- cp_unevaluated_operand = un;
}
}
else
init = DECL_INITIAL (member);
- if (init && TREE_CODE (init) == DEFAULT_ARG)
+ if (init && TREE_CODE (init) == DEFERRED_PARSE)
{
if (complain & tf_error)
{
bool simple_target = (init && SIMPLE_TARGET_EXPR_P (init));
if (simple_target)
init = TARGET_EXPR_INITIAL (init);
- init = break_out_target_exprs (init);
+ init = break_out_target_exprs (init, /*loc*/true);
+ if (in_ctor && init && TREE_CODE (init) == TARGET_EXPR)
+ /* This expresses the full initialization, prevent perform_member_init from
+ calling another constructor (58162). */
+ TARGET_EXPR_DIRECT_INIT_P (init) = true;
if (simple_target && TREE_CODE (init) != CONSTRUCTOR)
/* Now put it back so C++17 copy elision works. */
init = get_target_expr (init);
return true;
}
+/* If INIT's value can come from a call to std::initializer_list<T>::begin,
+ return that function. Otherwise, NULL_TREE. */
+
+static tree
+find_list_begin (tree init)
+{
+ STRIP_NOPS (init);
+ /* Only the last operand of a COMPOUND_EXPR yields the value; walk
+ down to it. */
+ while (TREE_CODE (init) == COMPOUND_EXPR)
+ init = TREE_OPERAND (init, 1);
+ STRIP_NOPS (init);
+ if (TREE_CODE (init) == COND_EXPR)
+ {
+ /* Either arm of a conditional can supply the value; recurse into
+ both, preferring the true arm. Operand 1 can be NULL (the GNU
+ "x ?: y" extension), in which case operand 0 is the value. */
+ tree left = TREE_OPERAND (init, 1);
+ if (!left)
+ left = TREE_OPERAND (init, 0);
+ left = find_list_begin (left);
+ if (left)
+ return left;
+ return find_list_begin (TREE_OPERAND (init, 2));
+ }
+ /* A direct call: check that the callee is named "begin" and is a
+ member of std::initializer_list. */
+ if (TREE_CODE (init) == CALL_EXPR)
+ if (tree fn = get_callee_fndecl (init))
+ if (id_equal (DECL_NAME (fn), "begin")
+ && is_std_init_list (DECL_CONTEXT (fn)))
+ return fn;
+ return NULL_TREE;
+}
+
+/* If INIT initializing MEMBER is copying the address of the underlying array
+ of an initializer_list, warn. */
+
+static void
+maybe_warn_list_ctor (tree member, tree init)
+{
+ tree memtype = TREE_TYPE (member);
+ /* Only relevant when a pointer member is being initialized inside a
+ constructor taking std::initializer_list (a "list constructor"). */
+ if (!init || !TYPE_PTR_P (memtype)
+ || !is_list_ctor (current_function_decl))
+ return;
+
+ /* Extract the element type T from the constructor's
+ std::initializer_list<T> parameter. */
+ tree parms = FUNCTION_FIRST_USER_PARMTYPE (current_function_decl);
+ tree initlist = non_reference (TREE_VALUE (parms));
+ tree targs = CLASSTYPE_TI_ARGS (initlist);
+ tree elttype = TREE_VEC_ELT (targs, 0);
+
+ /* Only warn if the member points at the list's element type; otherwise
+ the pointer cannot alias the list's backing array. */
+ if (!same_type_ignoring_top_level_qualifiers_p
+ (TREE_TYPE (memtype), elttype))
+ return;
+
+ /* Does the initializer's value trace back to initializer_list::begin? */
+ tree begin = find_list_begin (init);
+ if (!begin)
+ return;
+
+ location_t loc = cp_expr_loc_or_input_loc (init);
+ warning_at (loc, OPT_Winit_list_lifetime,
+ "initializing %qD from %qE does not extend the lifetime "
+ "of the underlying array", member, begin);
+}
+
/* Initialize MEMBER, a FIELD_DECL, with INIT, a TREE_LIST of
arguments. If TREE_LIST is void_type_node, an empty initializer
list was given; if NULL_TREE no initializer was given. */
member);
}
+ if (array_of_unknown_bound_p (type))
+ {
+ maybe_reject_flexarray_init (member, init);
+ return;
+ }
+
+ if (init && TREE_CODE (init) == TREE_LIST
+ && (DIRECT_LIST_INIT_P (TREE_VALUE (init))
+ /* FIXME C++20 parenthesized aggregate init (PR 92812). */
+ || !(/* cxx_dialect >= cxx20 ? CP_AGGREGATE_TYPE_P (type) */
+ /* : */CLASS_TYPE_P (type))))
+ init = build_x_compound_expr_from_list (init, ELK_MEM_INIT,
+ tf_warning_or_error);
+
if (init == void_type_node)
{
/* mem() means value-initialization. */
}
}
else if (init
- && (TREE_CODE (type) == REFERENCE_TYPE
- /* Pre-digested NSDMI. */
- || (((TREE_CODE (init) == CONSTRUCTOR
- && TREE_TYPE (init) == type)
- /* { } mem-initializer. */
- || (TREE_CODE (init) == TREE_LIST
- && DIRECT_LIST_INIT_P (TREE_VALUE (init))))
+ && (TYPE_REF_P (type)
+ || (TREE_CODE (init) == CONSTRUCTOR
&& (CP_AGGREGATE_TYPE_P (type)
|| is_std_init_list (type)))))
{
reference member in a constructor’s ctor-initializer (12.6.2)
persists until the constructor exits." */
unsigned i; tree t;
- vec<tree, va_gc> *cleanups = make_tree_vector ();
- if (TREE_CODE (init) == TREE_LIST)
- init = build_x_compound_expr_from_list (init, ELK_MEM_INIT,
- tf_warning_or_error);
- if (TREE_TYPE (init) != type)
+ releasing_vec cleanups;
+ if (!same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (init), type))
{
if (BRACE_ENCLOSED_INITIALIZER_P (init)
&& CP_AGGREGATE_TYPE_P (type))
}
if (init == error_mark_node)
return;
+ if (DECL_SIZE (member) && integer_zerop (DECL_SIZE (member))
+ && !TREE_SIDE_EFFECTS (init))
+ /* Don't add trivial initialization of an empty base/field, as they
+ might not be ordered the way the back-end expects. */
+ return;
/* A FIELD_DECL doesn't really have a suitable lifetime, but
make_temporary_var_for_ref_to_temp will treat it as automatic and
set_up_extended_ref_temp wants to use the decl in a warning. */
finish_expr_stmt (init);
FOR_EACH_VEC_ELT (*cleanups, i, t)
push_cleanup (decl, t, false);
- release_tree_vector (cleanups);
}
else if (type_build_ctor_call (type)
|| (init && CLASS_TYPE_P (strip_array_types (type))))
{
if (TREE_CODE (type) == ARRAY_TYPE)
{
- if (init)
- {
- /* Check to make sure the member initializer is valid and
- something like a CONSTRUCTOR in: T a[] = { 1, 2 } and
- if it isn't, return early to avoid triggering another
- error below. */
- if (maybe_reject_flexarray_init (member, init))
- return;
-
- if (TREE_CODE (init) != TREE_LIST || TREE_CHAIN (init))
- init = error_mark_node;
- else
- init = TREE_VALUE (init);
-
- if (BRACE_ENCLOSED_INITIALIZER_P (init))
- init = digest_init (type, init, tf_warning_or_error);
- }
if (init == NULL_TREE
|| same_type_ignoring_top_level_qualifiers_p (type,
TREE_TYPE (init)))
{
/* TYPE_NEEDS_CONSTRUCTING can be set just because we have a
vtable; still give this diagnostic. */
+ auto_diagnostic_group d;
if (permerror (DECL_SOURCE_LOCATION (current_function_decl),
"uninitialized const member in %q#T", type))
inform (DECL_SOURCE_LOCATION (member),
{
tree core_type;
/* member traversal: note it leaves init NULL */
- if (TREE_CODE (type) == REFERENCE_TYPE)
+ if (TYPE_REF_P (type))
{
+ auto_diagnostic_group d;
if (permerror (DECL_SOURCE_LOCATION (current_function_decl),
"uninitialized reference member in %q#T", type))
inform (DECL_SOURCE_LOCATION (member),
}
else if (CP_TYPE_CONST_P (type))
{
+ auto_diagnostic_group d;
if (permerror (DECL_SOURCE_LOCATION (current_function_decl),
"uninitialized const member in %q#T", type))
inform (DECL_SOURCE_LOCATION (member),
/*using_new=*/false,
/*complain=*/true);
}
- else if (TREE_CODE (init) == TREE_LIST)
- /* There was an explicit member initialization. Do some work
- in that case. */
- init = build_x_compound_expr_from_list (init, ELK_MEM_INIT,
- tf_warning_or_error);
- /* Reject a member initializer for a flexible array member. */
- if (init && !maybe_reject_flexarray_init (member, init))
+ maybe_warn_list_ctor (member, init);
+
+ if (init)
finish_expr_stmt (cp_build_modify_expr (input_location, decl,
INIT_EXPR, init,
tf_warning_or_error));
/*access_path=*/NULL_TREE,
/*preserve_reference=*/false,
tf_warning_or_error);
- expr = build_delete (type, expr, sfk_complete_destructor,
+ expr = build_delete (input_location,
+ type, expr, sfk_complete_destructor,
LOOKUP_NONVIRTUAL|LOOKUP_DESTRUCTOR, 0,
tf_warning_or_error);
/* Record the initialization. */
TREE_VALUE (subobject_init) = TREE_VALUE (init);
+ /* Carry over the dummy TREE_TYPE node containing the source location. */
+ TREE_TYPE (subobject_init) = TREE_TYPE (init);
next_subobject = subobject_init;
}
/* Initialize the data members. */
while (mem_inits)
{
+ /* If this initializer was explicitly provided, then the dummy TREE_TYPE
+ node contains the source location. */
+ iloc_sentinel ils (EXPR_LOCATION (TREE_TYPE (mem_inits)));
+
perform_member_init (TREE_PURPOSE (mem_inits),
TREE_VALUE (mem_inits));
mem_inits = TREE_CHAIN (mem_inits);
return error_mark_node;
location_t init_loc = (init
- ? EXPR_LOC_OR_LOC (init, input_location)
+ ? cp_expr_loc_or_input_loc (init)
: location_of (exp));
TREE_READONLY (exp) = 0;
if (VAR_P (exp) && DECL_DECOMPOSITION_P (exp))
{
from_array = 1;
- if (init && DECL_P (init)
+ init = mark_rvalue_use (init);
+ if (init
+ && DECL_P (tree_strip_any_location_wrapper (init))
&& !(flags & LOOKUP_ONLYCONVERTING))
{
/* Wrap the initializer in a CONSTRUCTOR so that build_vec_init
}
else
{
- /* An array may not be initialized use the parenthesized
- initialization form -- unless the initializer is "()". */
- if (init && TREE_CODE (init) == TREE_LIST)
- {
- if (complain & tf_error)
- error ("bad array initializer");
- return error_mark_node;
- }
/* Must arrange to initialize each element of EXP
from elements of INIT. */
if (cv_qualified_p (type))
from_array = (itype && same_type_p (TREE_TYPE (init),
TREE_TYPE (exp)));
- if (init && !from_array
- && !BRACE_ENCLOSED_INITIALIZER_P (init))
+ if (init && !BRACE_ENCLOSED_INITIALIZER_P (init)
+ && (!from_array
+ || (TREE_CODE (init) != CONSTRUCTOR
+ /* Can happen, eg, handling the compound-literals
+ extension (ext/complit12.C). */
+ && TREE_CODE (init) != TARGET_EXPR)))
{
if (complain & tf_error)
- permerror (init_loc, "array must be initialized "
- "with a brace-enclosed initializer");
- else
- return error_mark_node;
+ error_at (init_loc, "array must be initialized "
+ "with a brace-enclosed initializer");
+ return error_mark_node;
}
}
&& !DIRECT_LIST_INIT_P (init))
flags |= LOOKUP_ONLYCONVERTING;
- if ((VAR_P (exp) || TREE_CODE (exp) == PARM_DECL)
- && !lookup_attribute ("warn_unused", TYPE_ATTRIBUTES (type)))
- /* Just know that we've seen something for this node. */
- TREE_USED (exp) = 1;
-
is_global = begin_init_stmts (&stmt_expr, &compound_stmt);
destroy_temps = stmts_are_full_exprs_p ();
current_stmt_tree ()->stmts_are_full_exprs_p = 0;
TREE_READONLY (exp) = was_const;
TREE_THIS_VOLATILE (exp) = was_volatile;
+ if ((VAR_P (exp) || TREE_CODE (exp) == PARM_DECL)
+ && TREE_SIDE_EFFECTS (stmt_expr)
+ && !lookup_attribute ("warn_unused", TYPE_ATTRIBUTES (type)))
+ /* Just know that we've seen something for this node. */
+ TREE_USED (exp) = 1;
+
return stmt_expr;
}
}
if (init && TREE_CODE (init) != TREE_LIST
- && (flags & LOOKUP_ONLYCONVERTING))
+ && (flags & LOOKUP_ONLYCONVERTING)
+ && !unsafe_return_slot_p (exp))
{
/* Base subobjects should only get direct-initialization. */
gcc_assert (true_exp == exp);
tree elt; unsigned i;
/* Unshare the arguments for the second call. */
- vec<tree, va_gc> *parms2 = make_tree_vector ();
+ releasing_vec parms2;
FOR_EACH_VEC_SAFE_ELT (parms, i, elt)
{
elt = break_out_target_exprs (elt);
&parms2, binfo, flags,
complain);
complete = fold_build_cleanup_point_expr (void_type_node, complete);
- release_tree_vector (parms2);
base = build_special_member_call (exp, base_ctor_identifier,
&parms, binfo, flags,
/* If the type has data but no user-provided ctor, we need to zero
out the object. */
if (!type_has_user_provided_constructor (type)
- && !is_really_empty_class (type))
+ && !is_really_empty_class (type, /*ignore_vptr*/true))
{
tree field_size = NULL_TREE;
if (exp != true_exp && CLASSTYPE_AS_BASE (type) != type)
/* If MEMBER is non-static, then the program has fallen afoul of
[expr.prim]:
- An id-expression that denotes a nonstatic data member or
- nonstatic member function of a class can only be used:
+ An id-expression that denotes a non-static data member or
+ non-static member function of a class can only be used:
-- as part of a class member access (_expr.ref_) in which the
object-expression refers to the member's class or a class
-- to form a pointer to member (_expr.unary.op_), or
- -- in the body of a nonstatic member function of that class or
- of a class derived from that class (_class.mfct.nonstatic_), or
+ -- in the body of a non-static member function of that class or
+ of a class derived from that class (_class.mfct.non-static_), or
-- in a mem-initializer for a constructor for that class or for
a class derived from that class (_class.base.init_). */
recursively); otherwise, return DECL. If STRICT_P, the
initializer is only returned if DECL is a
constant-expression. If RETURN_AGGREGATE_CST_OK_P, it is ok to
- return an aggregate constant. */
+ return an aggregate constant. If UNSHARE_P, return an unshared
+ copy of the initializer. */
static tree
-constant_value_1 (tree decl, bool strict_p, bool return_aggregate_cst_ok_p)
+constant_value_1 (tree decl, bool strict_p, bool return_aggregate_cst_ok_p,
+ bool unshare_p)
{
while (TREE_CODE (decl) == CONST_DECL
|| decl_constant_var_p (decl)
|| TREE_CODE (init) == STRING_CST)))
break;
/* Don't return a CONSTRUCTOR for a variable with partial run-time
- initialization, since it doesn't represent the entire value. */
- if (TREE_CODE (init) == CONSTRUCTOR
+ initialization, since it doesn't represent the entire value.
+ Similarly for VECTOR_CSTs created by cp_folding those
+ CONSTRUCTORs. */
+ if ((TREE_CODE (init) == CONSTRUCTOR
+ || TREE_CODE (init) == VECTOR_CST)
&& !DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (decl))
break;
/* If the variable has a dynamic initializer, don't use its
&& !DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (decl)
&& DECL_NONTRIVIALLY_INITIALIZED_P (decl))
break;
- decl = unshare_expr (init);
+ decl = init;
}
- return decl;
+ return unshare_p ? unshare_expr (decl) : decl;
}
/* If DECL is a CONST_DECL, or a constant VAR_DECL initialized by constant
scalar_constant_value (tree decl)
{
return constant_value_1 (decl, /*strict_p=*/true,
- /*return_aggregate_cst_ok_p=*/false);
+ /*return_aggregate_cst_ok_p=*/false,
+ /*unshare_p=*/true);
}
-/* Like scalar_constant_value, but can also return aggregate initializers. */
+/* Like scalar_constant_value, but can also return aggregate initializers.
+ If UNSHARE_P, return an unshared copy of the initializer. */
tree
-decl_really_constant_value (tree decl)
+decl_really_constant_value (tree decl, bool unshare_p /*= true*/)
{
return constant_value_1 (decl, /*strict_p=*/true,
- /*return_aggregate_cst_ok_p=*/true);
+ /*return_aggregate_cst_ok_p=*/true,
+ /*unshare_p=*/unshare_p);
}
-/* A more relaxed version of scalar_constant_value, used by the
+/* A more relaxed version of decl_really_constant_value, used by the
common C/C++ code. */
tree
-decl_constant_value (tree decl)
+decl_constant_value (tree decl, bool unshare_p)
{
return constant_value_1 (decl, /*strict_p=*/processing_template_decl,
- /*return_aggregate_cst_ok_p=*/true);
+ /*return_aggregate_cst_ok_p=*/true,
+ /*unshare_p=*/unshare_p);
+}
+
+tree
+decl_constant_value (tree decl)
+{
+ return decl_constant_value (decl, /*unshare_p=*/true);
}
\f
/* Common subroutines of build_new and build_vec_delete. */
creates and returns a NEW_EXPR. */
static tree
-build_raw_new_expr (vec<tree, va_gc> *placement, tree type, tree nelts,
- vec<tree, va_gc> *init, int use_global_new)
+build_raw_new_expr (location_t loc, vec<tree, va_gc> *placement, tree type,
+ tree nelts, vec<tree, va_gc> *init, int use_global_new)
{
tree init_list;
tree new_expr;
else
init_list = build_tree_list_vec (init);
- new_expr = build4 (NEW_EXPR, build_pointer_type (type),
- build_tree_list_vec (placement), type, nelts,
- init_list);
+ new_expr = build4_loc (loc, NEW_EXPR, build_pointer_type (type),
+ build_tree_list_vec (placement), type, nelts,
+ init_list);
NEW_EXPR_USE_GLOBAL (new_expr) = use_global_new;
TREE_SIDE_EFFECTS (new_expr) = 1;
if (type_has_user_provided_constructor (field_type))
continue;
- if (TREE_CODE (field_type) == REFERENCE_TYPE)
+ if (TYPE_REF_P (field_type))
{
++ error_count;
if (complain)
static void
warn_placement_new_too_small (tree type, tree nelts, tree size, tree oper)
{
- location_t loc = EXPR_LOC_OR_LOC (oper, input_location);
+ location_t loc = cp_expr_loc_or_input_loc (oper);
/* The number of bytes to add to or subtract from the size of the provided
buffer based on an offset into an array or an array element reference.
Otherwise, use the size of the entire array as an optimistic
estimate (this may lead to false negatives). */
tree adj = TREE_OPERAND (oper, 1);
+ adj = fold_for_warn (adj);
if (CONSTANT_CLASS_P (adj))
adjust += wi::to_offset (convert (ssizetype, adj));
else
tree op0 = oper;
while (TREE_CODE (op0 = TREE_OPERAND (op0, 0)) == COMPONENT_REF);
+ STRIP_ANY_LOCATION_WRAPPER (op0);
if (VAR_P (op0))
var_decl = op0;
oper = TREE_OPERAND (oper, 1);
}
+ STRIP_ANY_LOCATION_WRAPPER (oper);
tree opertype = TREE_TYPE (oper);
- if ((addr_expr || !POINTER_TYPE_P (opertype))
+ if ((addr_expr || !INDIRECT_TYPE_P (opertype))
&& (VAR_P (oper)
|| TREE_CODE (oper) == FIELD_DECL
|| TREE_CODE (oper) == PARM_DECL))
others. */
offset_int bytes_need;
+ if (nelts)
+ nelts = fold_for_warn (nelts);
+
if (CONSTANT_CLASS_P (size))
bytes_need = wi::to_offset (size);
else if (nelts && CONSTANT_CLASS_P (nelts))
}
/* Determine whether an allocation function is a namespace-scope
- non-replaceable placement new function. See DR 1748.
- TODO: Enable in all standard modes. */
+ non-replaceable placement new function. See DR 1748. */
static bool
std_placement_new_fn_p (tree alloc_fn)
{
return false;
}
+/* For element type ELT_TYPE, return the appropriate type of the heap object
+ containing such element(s). COOKIE_SIZE is NULL or the size of cookie
+ in bytes. FULL_SIZE is NULL if it is unknown how big the heap allocation
+ will be, otherwise size of the heap object. If COOKIE_SIZE is NULL,
+ return array type ELT_TYPE[FULL_SIZE / sizeof(ELT_TYPE)], otherwise return
+ struct { size_t[COOKIE_SIZE/sizeof(size_t)]; ELT_TYPE[N]; }
+ where N is nothing (flexible array member) if FULL_SIZE is NULL, otherwise
+ it is computed such that the size of the struct fits into FULL_SIZE. */
+
+tree
+build_new_constexpr_heap_type (tree elt_type, tree cookie_size, tree full_size)
+{
+ /* Both sizes, when given, must fit in an unsigned HOST_WIDE_INT. */
+ gcc_assert (cookie_size == NULL_TREE || tree_fits_uhwi_p (cookie_size));
+ gcc_assert (full_size == NULL_TREE || tree_fits_uhwi_p (full_size));
+ unsigned HOST_WIDE_INT csz = cookie_size ? tree_to_uhwi (cookie_size) : 0;
+ tree itype2 = NULL_TREE;
+ if (full_size)
+ {
+ /* Deduce the element count: (full size - cookie size) / element size. */
+ unsigned HOST_WIDE_INT fsz = tree_to_uhwi (full_size);
+ gcc_assert (fsz >= csz);
+ fsz -= csz;
+ fsz /= int_size_in_bytes (elt_type);
+ itype2 = build_index_type (size_int (fsz - 1));
+ if (!cookie_size)
+ return build_cplus_array_type (elt_type, itype2);
+ }
+ else
+ /* Without FULL_SIZE we must at least have a cookie; the element
+ array below becomes a flexible array member (itype2 stays NULL). */
+ gcc_assert (cookie_size);
+ /* Number of size_t slots needed to hold the cookie. */
+ csz /= int_size_in_bytes (sizetype);
+ tree itype1 = build_index_type (size_int (csz - 1));
+ tree atype1 = build_cplus_array_type (sizetype, itype1);
+ tree atype2 = build_cplus_array_type (elt_type, itype2);
+ /* Lay out struct { size_t cookie[csz]; elt_type data[...]; }. */
+ tree rtype = cxx_make_type (RECORD_TYPE);
+ TYPE_NAME (rtype) = heap_identifier;
+ tree fld1 = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE, atype1);
+ tree fld2 = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE, atype2);
+ DECL_FIELD_CONTEXT (fld1) = rtype;
+ DECL_FIELD_CONTEXT (fld2) = rtype;
+ DECL_ARTIFICIAL (fld1) = true;
+ DECL_ARTIFICIAL (fld2) = true;
+ TYPE_FIELDS (rtype) = fld1;
+ DECL_CHAIN (fld1) = fld2;
+ layout_type (rtype);
+ return rtype;
+}
+
+/* Help the constexpr code to find the right type for the heap variable
+ by adding a NOP_EXPR around ALLOC_CALL if needed for cookie_size.
+ Return ALLOC_CALL or ALLOC_CALL cast to a pointer to
+ struct { size_t[cookie_size/sizeof(size_t)]; elt_type[]; }. */
+
+static tree
+maybe_wrap_new_for_constexpr (tree alloc_call, tree elt_type, tree cookie_size)
+{
+ /* constexpr new is a C++20 feature; nothing to do in older dialects. */
+ if (cxx_dialect < cxx20)
+ return alloc_call;
+
+ /* Only needed when the allocation can be constant-evaluated, i.e.
+ inside a constexpr function (or outside any function). */
+ if (current_function_decl != NULL_TREE
+ && !DECL_DECLARED_CONSTEXPR_P (current_function_decl))
+ return alloc_call;
+
+ tree call_expr = extract_call_expr (alloc_call);
+ if (call_expr == error_mark_node)
+ return alloc_call;
+
+ /* Only a replaceable global operator new qualifies for constant
+ evaluation; class-specific or non-new callees are left alone. */
+ tree alloc_call_fndecl = cp_get_callee_fndecl_nofold (call_expr);
+ if (alloc_call_fndecl == NULL_TREE
+ || !IDENTIFIER_NEW_OP_P (DECL_NAME (alloc_call_fndecl))
+ || CP_DECL_CONTEXT (alloc_call_fndecl) != global_namespace)
+ return alloc_call;
+
+ /* Wrap the call in a NOP_EXPR cast to a pointer to the cookie+array
+ record so the constexpr evaluator knows the heap object's layout. */
+ tree rtype = build_new_constexpr_heap_type (elt_type, cookie_size,
+ NULL_TREE);
+ return build_nop (build_pointer_type (rtype), alloc_call);
+}
+
/* Generate code for a new-expression, including calling the "operator
new" function, initializing the object, and, if an exception occurs
during construction, cleaning up. The arguments are as for
outer_nelts_from_type = true;
}
- /* Lots of logic below. depends on whether we have a constant number of
+ /* Lots of logic below depends on whether we have a constant number of
elements, so go ahead and fold it now. */
- if (outer_nelts)
- outer_nelts = maybe_constant_value (outer_nelts);
+ const_tree cst_outer_nelts = fold_non_dependent_expr (outer_nelts, complain);
/* If our base type is an array, then make sure we know how many elements
it has. */
tree inner_nelts_cst = maybe_constant_value (inner_nelts);
if (TREE_CODE (inner_nelts_cst) == INTEGER_CST)
{
- bool overflow;
+ wi::overflow_type overflow;
offset_int result = wi::mul (wi::to_offset (inner_nelts_cst),
inner_nelts_count, SIGNED, &overflow);
if (overflow)
{
if (complain & tf_error)
{
- error_at (EXPR_LOC_OR_LOC (inner_nelts, input_location),
+ error_at (cp_expr_loc_or_input_loc (inner_nelts),
"array size in new-expression must be constant");
cxx_constant_value(inner_nelts);
}
complain);
}
+ if (!verify_type_context (input_location, TCTX_ALLOCATION, elt_type,
+ !(complain & tf_error)))
+ return error_mark_node;
+
if (variably_modified_type_p (elt_type, NULL_TREE) && (complain & tf_error))
{
error ("variably modified type not allowed in new-expression");
/* Warn if we performed the (T[N]) to T[N] transformation and N is
variable. */
if (outer_nelts_from_type
- && !TREE_CONSTANT (outer_nelts))
+ && !TREE_CONSTANT (cst_outer_nelts))
{
if (complain & tf_warning_or_error)
{
- pedwarn (EXPR_LOC_OR_LOC (outer_nelts, input_location), OPT_Wvla,
+ pedwarn (cp_expr_loc_or_input_loc (outer_nelts), OPT_Wvla,
typedef_variant_p (orig_type)
? G_("non-constant array new length must be specified "
- "directly, not by typedef")
+ "directly, not by %<typedef%>")
: G_("non-constant array new length must be specified "
"without parentheses around the type-id"));
}
if (VOID_TYPE_P (elt_type))
{
if (complain & tf_error)
- error ("invalid type %<void%> for new");
+ error ("invalid type %<void%> for %<new%>");
return error_mark_node;
}
+ if (is_std_init_list (elt_type))
+ warning (OPT_Winit_list_lifetime,
+ "%<new%> of %<initializer_list%> does not "
+ "extend the lifetime of the underlying array");
+
if (abstract_virtuals_error_sfinae (ACU_NEW, elt_type, complain))
return error_mark_node;
maximum object size and is safe even if we choose not to use
a cookie after all. */
max_size -= wi::to_offset (cookie_size);
- bool overflow;
+ wi::overflow_type overflow;
inner_size = wi::mul (wi::to_offset (size), inner_nelts_count, SIGNED,
&overflow);
if (overflow || wi::gtu_p (inner_size, max_size))
{
if (complain & tf_error)
- error ("size of array is too large");
+ {
+ cst_size_error error;
+ if (overflow)
+ error = cst_size_overflow;
+ else
+ {
+ error = cst_size_too_big;
+ size = size_binop (MULT_EXPR, size,
+ wide_int_to_tree (sizetype,
+ inner_nelts_count));
+ size = cp_fully_fold (size);
+ }
+ invalid_array_size_error (input_location, error, size,
+ /*name=*/NULL_TREE);
+ }
return error_mark_node;
}
size = size_binop (MULT_EXPR, size, fold_convert (sizetype, nelts));
- if (INTEGER_CST == TREE_CODE (outer_nelts))
+ if (TREE_CODE (cst_outer_nelts) == INTEGER_CST)
{
- if (tree_int_cst_lt (max_outer_nelts_tree, outer_nelts))
+ if (tree_int_cst_lt (max_outer_nelts_tree, cst_outer_nelts))
{
/* When the array size is constant, check it at compile time
to make sure it doesn't exceed the implementation-defined
isn't explicitly stated but it's enforced anyway -- see
grokdeclarator in cp/decl.c). */
if (complain & tf_error)
- error ("size of array is too large");
+ {
+ size = cp_fully_fold (size);
+ invalid_array_size_error (input_location, cst_size_too_big,
+ size, NULL_TREE);
+ }
return error_mark_node;
}
}
/* Create the argument list. */
vec_safe_insert (*placement, 0, size);
/* Do name-lookup to find the appropriate operator. */
- fns = lookup_fnfields (elt_type, fnname, /*protect=*/2);
+ fns = lookup_fnfields (elt_type, fnname, /*protect=*/2, complain);
if (fns == NULL_TREE)
{
if (complain & tf_error)
|| CP_DECL_CONTEXT (alloc_fn) == global_namespace)
&& !aligned_allocation_fn_p (alloc_fn))
{
+ auto_diagnostic_group d;
if (warning (OPT_Waligned_new_, "%<new%> of type %qT with extended "
"alignment %d", elt_type, TYPE_ALIGN_UNIT (elt_type)))
{
}
}
+ tree alloc_call_expr = extract_call_expr (alloc_call);
+ if (TREE_CODE (alloc_call_expr) == CALL_EXPR)
+ CALL_FROM_NEW_OR_DELETE_P (alloc_call_expr) = 1;
+
+ if (cookie_size)
+ alloc_call = maybe_wrap_new_for_constexpr (alloc_call, elt_type,
+ cookie_size);
+
/* In the simple case, we can stop now. */
pointer_type = build_pointer_type (type);
if (!cookie_size && !is_initialized)
explicit_value_init_p = true;
}
- if (processing_template_decl && explicit_value_init_p)
+ if (processing_template_decl)
{
+ /* Avoid an ICE when converting to a base in build_simple_base_path.
+ We'll throw this all away anyway, and build_new will create
+ a NEW_EXPR. */
+ tree t = fold_convert (build_pointer_type (elt_type), data_addr);
/* build_value_init doesn't work in templates, and we don't need
the initializer anyway since we're going to throw it away and
rebuild it at instantiation time, so just build up a single
constructor call to get any appropriate diagnostics. */
- init_expr = cp_build_fold_indirect_ref (data_addr);
+ init_expr = cp_build_fold_indirect_ref (t);
if (type_build_ctor_call (elt_type))
init_expr = build_special_member_call (init_expr,
complete_ctor_identifier,
else if (*init)
{
if (complain & tf_error)
- permerror (input_location,
- "parenthesized initializer in array new");
- else
- return error_mark_node;
- vecinit = build_tree_list_vec (*init);
+ error ("parenthesized initializer in array new");
+ return error_mark_node;
}
init_expr
= build_vec_init (data_addr,
complete_ctor_identifier,
init, elt_type,
LOOKUP_NORMAL,
- complain);
+ complain|tf_no_cleanup);
}
else if (explicit_value_init_p)
{
tree ie;
/* We are processing something like `new int (10)', which
- means allocate an int, and initialize it with 10. */
+ means allocate an int, and initialize it with 10.
- ie = build_x_compound_expr_from_vec (*init, "new initializer",
- complain);
+ In C++20, also handle `new A(1, 2)'. */
+ if (cxx_dialect >= cxx20
+ && AGGREGATE_TYPE_P (type)
+ && (*init)->length () > 1)
+ {
+ ie = build_tree_list_vec (*init);
+ ie = build_constructor_from_list (init_list_type_node, ie);
+ CONSTRUCTOR_IS_DIRECT_INIT (ie) = true;
+ CONSTRUCTOR_IS_PAREN_INIT (ie) = true;
+ ie = digest_init (type, ie, complain);
+ }
+ else
+ ie = build_x_compound_expr_from_vec (*init, "new initializer",
+ complain);
init_expr = cp_build_modify_expr (input_location, init_expr,
INIT_EXPR, ie, complain);
}
rather than just "new". This may change PLACEMENT and INIT. */
tree
-build_new (vec<tree, va_gc> **placement, tree type, tree nelts,
- vec<tree, va_gc> **init, int use_global_new, tsubst_flags_t complain)
+build_new (location_t loc, vec<tree, va_gc> **placement, tree type,
+ tree nelts, vec<tree, va_gc> **init, int use_global_new,
+ tsubst_flags_t complain)
{
tree rval;
vec<tree, va_gc> *orig_placement = NULL;
if (auto_node)
{
tree d_init = NULL_TREE;
- if (vec_safe_length (*init) == 1)
+ const size_t len = vec_safe_length (*init);
+ /* E.g. new auto(x) must have exactly one element, or
+ a {} initializer will have one element. */
+ if (len == 1)
{
d_init = (**init)[0];
d_init = resolve_nondeduced_context (d_init, complain);
}
- type = do_auto_deduction (type, d_init, auto_node);
+ /* For the rest, e.g. new A(1, 2, 3), create a list. */
+ else if (len > 1)
+ {
+ unsigned int n;
+ tree t;
+ tree *pp = &d_init;
+ FOR_EACH_VEC_ELT (**init, n, t)
+ {
+ t = resolve_nondeduced_context (t, complain);
+ *pp = build_tree_list (NULL_TREE, t);
+ pp = &TREE_CHAIN (*pp);
+ }
+ }
+ type = do_auto_deduction (type, d_init, auto_node, complain);
}
}
|| (nelts && type_dependent_expression_p (nelts))
|| (nelts && *init)
|| any_type_dependent_arguments_p (*init))
- return build_raw_new_expr (*placement, type, nelts, *init,
+ return build_raw_new_expr (loc, *placement, type, nelts, *init,
use_global_new);
orig_placement = make_tree_vector_copy (*placement);
if (nelts)
{
+ location_t nelts_loc = cp_expr_loc_or_loc (nelts, loc);
if (!build_expr_type_conversion (WANT_INT | WANT_ENUM, nelts, false))
{
if (complain & tf_error)
- permerror (input_location, "size in array new must have integral type");
+ permerror (nelts_loc,
+ "size in array new must have integral type");
else
return error_mark_node;
}
/* Try to determine the constant value only for the purposes
of the diagnostic below but continue to use the original
value and handle const folding later. */
- const_tree cst_nelts = maybe_constant_value (nelts);
+ const_tree cst_nelts = fold_non_dependent_expr (nelts, complain);
/* The expression in a noptr-new-declarator is erroneous if it's of
non-class type and its value before converting to std::size_t is
less than zero. ... If the expression is a constant expression,
the program is ill-formed. */
- if (INTEGER_CST == TREE_CODE (cst_nelts)
- && tree_int_cst_sgn (cst_nelts) == -1)
- {
- if (complain & tf_error)
- error ("size of array is negative");
- return error_mark_node;
- }
+ if (TREE_CODE (cst_nelts) == INTEGER_CST
+ && !valid_array_size_p (nelts_loc, cst_nelts, NULL_TREE,
+ complain & tf_error))
+ return error_mark_node;
nelts = mark_rvalue_use (nelts);
nelts = cp_save_expr (cp_convert (sizetype, nelts, complain));
/* ``A reference cannot be created by the new operator. A reference
is not an object (8.2.2, 8.4.3), so a pointer to it could not be
returned by new.'' ARM 5.3.3 */
- if (TREE_CODE (type) == REFERENCE_TYPE)
+ if (TYPE_REF_P (type))
{
if (complain & tf_error)
- error ("new cannot be applied to a reference type");
+ error_at (loc, "new cannot be applied to a reference type");
else
return error_mark_node;
type = TREE_TYPE (type);
if (TREE_CODE (type) == FUNCTION_TYPE)
{
if (complain & tf_error)
- error ("new cannot be applied to a function type");
+ error_at (loc, "new cannot be applied to a function type");
return error_mark_node;
}
if (processing_template_decl)
{
- tree ret = build_raw_new_expr (orig_placement, type, orig_nelts,
+ tree ret = build_raw_new_expr (loc, orig_placement, type, orig_nelts,
orig_init, use_global_new);
release_tree_vector (orig_placement);
release_tree_vector (orig_init);
}
/* Wrap it in a NOP_EXPR so warn_if_unused_value doesn't complain. */
- rval = build1 (NOP_EXPR, TREE_TYPE (rval), rval);
+ rval = build1_loc (loc, NOP_EXPR, TREE_TYPE (rval), rval);
TREE_NO_WARNING (rval) = 1;
return rval;
}
\f
static tree
-build_vec_delete_1 (tree base, tree maxindex, tree type,
+build_vec_delete_1 (location_t loc, tree base, tree maxindex, tree type,
special_function_kind auto_delete_vec,
int use_global_delete, tsubst_flags_t complain)
{
if (base == error_mark_node || maxindex == error_mark_node)
return error_mark_node;
+ if (!verify_type_context (loc, TCTX_DEALLOCATION, type,
+ !(complain & tf_error)))
+ return error_mark_node;
+
if (!COMPLETE_TYPE_P (type))
{
- if ((complain & tf_warning)
- && warning (OPT_Wdelete_incomplete,
- "possible problem detected in invocation of "
- "delete [] operator:"))
- {
- cxx_incomplete_type_diagnostic (base, type, DK_WARNING);
- inform (input_location, "neither the destructor nor the "
- "class-specific operator delete [] will be called, "
- "even if they are declared when the class is defined");
- }
+ if (complain & tf_warning)
+ {
+ auto_diagnostic_group d;
+ if (warning_at (loc, OPT_Wdelete_incomplete,
+ "possible problem detected in invocation of "
+ "operator %<delete []%>"))
+ {
+ cxx_incomplete_type_diagnostic (base, type, DK_WARNING);
+ inform (loc, "neither the destructor nor the "
+ "class-specific operator %<delete []%> will be called, "
+ "even if they are declared when the class is defined");
+ }
+ }
/* This size won't actually be used. */
size_exp = size_one_node;
goto no_destructor;
/* Make sure the destructor is callable. */
if (type_build_dtor_call (type))
{
- tmp = build_delete (ptype, base, sfk_complete_destructor,
+ tmp = build_delete (loc, ptype, base, sfk_complete_destructor,
LOOKUP_NORMAL|LOOKUP_DESTRUCTOR, 1,
complain);
if (tmp == error_mark_node)
fold_convert (sizetype, maxindex));
tbase = create_temporary_var (ptype);
- tbase_init
- = cp_build_modify_expr (input_location, tbase, NOP_EXPR,
- fold_build_pointer_plus_loc (input_location,
- fold_convert (ptype,
- base),
- virtual_size),
- complain);
- if (tbase_init == error_mark_node)
- return error_mark_node;
- controller = build3 (BIND_EXPR, void_type_node, tbase,
- NULL_TREE, NULL_TREE);
+ DECL_INITIAL (tbase)
+ = fold_build_pointer_plus_loc (loc, fold_convert (ptype, base),
+ virtual_size);
+ tbase_init = build_stmt (loc, DECL_EXPR, tbase);
+ controller = build3 (BIND_EXPR, void_type_node, tbase, NULL_TREE, NULL_TREE);
TREE_SIDE_EFFECTS (controller) = 1;
body = build1 (EXIT_EXPR, void_type_node,
build2 (EQ_EXPR, boolean_type_node, tbase,
fold_convert (ptype, base)));
- tmp = fold_build1_loc (input_location, NEGATE_EXPR, sizetype, size_exp);
+ tmp = fold_build1_loc (loc, NEGATE_EXPR, sizetype, size_exp);
tmp = fold_build_pointer_plus (tbase, tmp);
- tmp = cp_build_modify_expr (input_location, tbase, NOP_EXPR, tmp, complain);
+ tmp = cp_build_modify_expr (loc, tbase, NOP_EXPR, tmp, complain);
if (tmp == error_mark_node)
return error_mark_node;
- body = build_compound_expr (input_location, body, tmp);
- tmp = build_delete (ptype, tbase, sfk_complete_destructor,
+ body = build_compound_expr (loc, body, tmp);
+ tmp = build_delete (loc, ptype, tbase, sfk_complete_destructor,
LOOKUP_NORMAL|LOOKUP_DESTRUCTOR, 1,
complain);
if (tmp == error_mark_node)
return error_mark_node;
- body = build_compound_expr (input_location, body, tmp);
+ body = build_compound_expr (loc, body, tmp);
loop = build1 (LOOP_EXPR, void_type_node, body);
- loop = build_compound_expr (input_location, tbase_init, loop);
+ loop = build_compound_expr (loc, tbase_init, loop);
no_destructor:
/* Delete the storage if appropriate. */
tree cookie_size;
cookie_size = targetm.cxx.get_cookie_size (type);
- base_tbd = cp_build_binary_op (input_location,
+ base_tbd = cp_build_binary_op (loc,
MINUS_EXPR,
cp_convert (string_type_node,
base, complain),
/*placement=*/NULL_TREE,
/*alloc_fn=*/NULL_TREE,
complain);
+
+ tree deallocate_call_expr = extract_call_expr (deallocate_expr);
+ if (TREE_CODE (deallocate_call_expr) == CALL_EXPR)
+ CALL_FROM_NEW_OR_DELETE_P (deallocate_call_expr) = 1;
}
body = loop;
- if (!deallocate_expr)
+ if (deallocate_expr == error_mark_node)
+ return error_mark_node;
+ else if (!deallocate_expr)
;
else if (!body)
body = deallocate_expr;
else
- /* The delete operator mist be called, even if a destructor
+ /* The delete operator must be called, even if a destructor
throws. */
body = build2 (TRY_FINALLY_EXPR, void_type_node, body, deallocate_expr);
body = integer_zero_node;
/* Outermost wrapper: If pointer is null, punt. */
- tree cond = build2_loc (input_location, NE_EXPR, boolean_type_node, base,
+ tree cond = build2_loc (loc, NE_EXPR, boolean_type_node, base,
fold_convert (TREE_TYPE (base), nullptr_node));
/* This is a compiler generated comparison, don't emit
e.g. -Wnonnull-compare warning for it. */
TREE_NO_WARNING (cond) = 1;
- body = build3_loc (input_location, COND_EXPR, void_type_node,
+ body = build3_loc (loc, COND_EXPR, void_type_node,
cond, body, integer_zero_node);
COND_EXPR_IS_VEC_DELETE (body) = true;
body = build1 (NOP_EXPR, void_type_node, body);
tree compound_stmt;
int destroy_temps;
tree try_block = NULL_TREE;
- int num_initialized_elts = 0;
+ HOST_WIDE_INT num_initialized_elts = 0;
bool is_global;
tree obase = base;
bool xvalue = false;
bool errors = false;
- location_t loc = (init ? EXPR_LOC_OR_LOC (init, input_location)
+ location_t loc = (init ? cp_expr_loc_or_input_loc (init)
: location_of (base));
if (TREE_CODE (atype) == ARRAY_TYPE && TYPE_DOMAIN (atype))
errors = true;
if (try_const)
{
+ if (!field)
+ field = size_int (idx);
tree e = maybe_constant_init (one_init);
if (reduced_constant_expression_p (e))
{
else
from = NULL_TREE;
- if (from_array == 2)
+ if (TREE_CODE (type) == ARRAY_TYPE)
+ elt_init = build_vec_init (to, NULL_TREE, from, /*val_init*/false,
+ from_array, complain);
+ else if (from_array == 2)
elt_init = cp_build_modify_expr (input_location, to, NOP_EXPR,
from, complain);
else if (type_build_ctor_call (type))
else if (TREE_CODE (type) == ARRAY_TYPE)
{
if (init && !BRACE_ENCLOSED_INITIALIZER_P (init))
- sorry
- ("cannot initialize multi-dimensional array with initializer");
- elt_init = build_vec_init (build1 (INDIRECT_REF, type, base),
- 0, init,
- explicit_value_init_p,
- 0, complain);
+ {
+ if ((complain & tf_error))
+ error_at (loc, "array must be initialized "
+ "with a brace-enclosed initializer");
+ elt_init = error_mark_node;
+ }
+ else
+ elt_init = build_vec_init (build1 (INDIRECT_REF, type, base),
+ 0, init,
+ explicit_value_init_p,
+ 0, complain);
}
else if (explicit_value_init_p)
{
if (TREE_CODE (init) == TREE_LIST)
init = build_x_compound_expr_from_list (init, ELK_INIT,
complain);
- elt_init = build2 (INIT_EXPR, type, to, init);
+ elt_init = (init == error_mark_node
+ ? error_mark_node
+ : build2 (INIT_EXPR, type, to, init));
}
}
if (e)
{
- int max = tree_to_shwi (maxindex)+1;
- for (; num_initialized_elts < max; ++num_initialized_elts)
+ HOST_WIDE_INT last = tree_to_shwi (maxindex);
+ if (num_initialized_elts <= last)
{
tree field = size_int (num_initialized_elts);
+ if (num_initialized_elts != last)
+ field = build2 (RANGE_EXPR, sizetype, field,
+ size_int (last));
CONSTRUCTOR_APPEND_ELT (const_vec, field, e);
}
}
}
current_stmt_tree ()->stmts_are_full_exprs_p = 1;
- if (elt_init)
+ if (elt_init && !errors)
finish_expr_stmt (elt_init);
current_stmt_tree ()->stmts_are_full_exprs_p = 0;
complain);
finish_cleanup_try_block (try_block);
- e = build_vec_delete_1 (rval, m,
+ e = build_vec_delete_1 (input_location, rval, m,
inner_elt_type, sfk_complete_destructor,
/*use_global_delete=*/0, complain);
if (e == error_mark_node)
tsubst_flags_t complain)
{
tree name;
- tree fn;
switch (dtor_kind)
{
case sfk_complete_destructor:
default:
gcc_unreachable ();
}
- fn = lookup_fnfields (TREE_TYPE (exp), name, /*protect=*/2);
- return build_new_method_call (exp, fn,
- /*args=*/NULL,
- /*conversion_path=*/NULL_TREE,
- flags,
- /*fn_p=*/NULL,
- complain);
+
+ return build_special_member_call (exp, name,
+ /*args=*/NULL,
+ /*binfo=*/TREE_TYPE (exp),
+ flags,
+ complain);
}
/* Generate a call to a destructor. TYPE is the type to cast ADDR to.
flags. See cp-tree.h for more info. */
tree
-build_delete (tree otype, tree addr, special_function_kind auto_delete,
+build_delete (location_t loc, tree otype, tree addr,
+ special_function_kind auto_delete,
int flags, int use_global_delete, tsubst_flags_t complain)
{
tree expr;
if (type == error_mark_node)
return error_mark_node;
- if (TREE_CODE (type) == POINTER_TYPE)
+ if (TYPE_PTR_P (type))
type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
if (TREE_CODE (type) == ARRAY_TYPE)
if (TYPE_DOMAIN (type) == NULL_TREE)
{
if (complain & tf_error)
- error ("unknown array size in delete");
+ error_at (loc, "unknown array size in delete");
return error_mark_node;
}
- return build_vec_delete (addr, array_type_nelts (type),
+ return build_vec_delete (loc, addr, array_type_nelts (type),
auto_delete, use_global_delete, complain);
}
+ bool deleting = (auto_delete == sfk_deleting_destructor);
+ gcc_assert (deleting == !(flags & LOOKUP_DESTRUCTOR));
+
if (TYPE_PTR_P (otype))
{
addr = mark_rvalue_use (addr);
if (!VOID_TYPE_P (type))
{
complete_type (type);
+ if (deleting
+ && !verify_type_context (loc, TCTX_DEALLOCATION, type,
+ !(complain & tf_error)))
+ return error_mark_node;
+
if (!COMPLETE_TYPE_P (type))
{
- if ((complain & tf_warning)
- && warning (OPT_Wdelete_incomplete,
- "possible problem detected in invocation of "
- "delete operator:"))
+ if (complain & tf_warning)
{
- cxx_incomplete_type_diagnostic (addr, type, DK_WARNING);
- inform (input_location,
- "neither the destructor nor the class-specific "
- "operator delete will be called, even if they are "
- "declared when the class is defined");
+ auto_diagnostic_group d;
+ if (warning_at (loc, OPT_Wdelete_incomplete,
+ "possible problem detected in invocation of "
+ "%<operator delete%>"))
+ {
+ cxx_incomplete_type_diagnostic (addr, type, DK_WARNING);
+ inform (loc,
+ "neither the destructor nor the class-specific "
+ "%<operator delete%> will be called, even if "
+ "they are declared when the class is defined");
+ }
}
}
- else if (auto_delete == sfk_deleting_destructor && warn_delnonvdtor
+ else if (deleting && warn_delnonvdtor
&& MAYBE_CLASS_TYPE_P (type) && !CLASSTYPE_FINAL (type)
&& TYPE_POLYMORPHIC_P (type))
{
if (!dtor || !DECL_VINDEX (dtor))
{
if (CLASSTYPE_PURE_VIRTUALS (type))
- warning (OPT_Wdelete_non_virtual_dtor,
- "deleting object of abstract class type %qT"
- " which has non-virtual destructor"
- " will cause undefined behavior", type);
+ warning_at (loc, OPT_Wdelete_non_virtual_dtor,
+ "deleting object of abstract class type %qT"
+ " which has non-virtual destructor"
+ " will cause undefined behavior", type);
else
- warning (OPT_Wdelete_non_virtual_dtor,
- "deleting object of polymorphic class type %qT"
- " which has non-virtual destructor"
- " might cause undefined behavior", type);
+ warning_at (loc, OPT_Wdelete_non_virtual_dtor,
+ "deleting object of polymorphic class type %qT"
+ " which has non-virtual destructor"
+ " might cause undefined behavior", type);
}
}
}
- if (TREE_SIDE_EFFECTS (addr))
- addr = save_expr (addr);
/* Throw away const and volatile on target type of addr. */
addr = convert_force (build_pointer_type (type), addr, 0, complain);
addr = cp_build_addr_expr (addr, complain);
if (addr == error_mark_node)
return error_mark_node;
- if (TREE_SIDE_EFFECTS (addr))
- addr = save_expr (addr);
addr = convert_force (build_pointer_type (type), addr, 0, complain);
}
- if (TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
- {
- /* Make sure the destructor is callable. */
- if (type_build_dtor_call (type))
- {
- expr = build_dtor_call (cp_build_fold_indirect_ref (addr),
- sfk_complete_destructor, flags, complain);
- if (expr == error_mark_node)
- return error_mark_node;
- }
+ if (deleting)
+ /* We will use ADDR multiple times so we must save it. */
+ addr = save_expr (addr);
- if (auto_delete != sfk_deleting_destructor)
- return void_node;
-
- return build_op_delete_call (DELETE_EXPR, addr,
- cxx_sizeof_nowarn (type),
- use_global_delete,
- /*placement=*/NULL_TREE,
- /*alloc_fn=*/NULL_TREE,
- complain);
- }
- else
+ bool virtual_p = false;
+ if (type_build_dtor_call (type))
{
- tree head = NULL_TREE;
- tree do_delete = NULL_TREE;
- tree ifexp;
-
if (CLASSTYPE_LAZY_DESTRUCTOR (type))
lazily_declare_fn (sfk_destructor, type);
+ virtual_p = DECL_VIRTUAL_P (CLASSTYPE_DESTRUCTOR (type));
+ }
- /* For `::delete x', we must not use the deleting destructor
- since then we would not be sure to get the global `operator
- delete'. */
- if (use_global_delete && auto_delete == sfk_deleting_destructor)
- {
- /* We will use ADDR multiple times so we must save it. */
- addr = save_expr (addr);
- head = get_target_expr (build_headof (addr));
- /* Delete the object. */
- do_delete = build_op_delete_call (DELETE_EXPR,
- head,
- cxx_sizeof_nowarn (type),
- /*global_p=*/true,
- /*placement=*/NULL_TREE,
- /*alloc_fn=*/NULL_TREE,
- complain);
- /* Otherwise, treat this like a complete object destructor
- call. */
- auto_delete = sfk_complete_destructor;
- }
- /* If the destructor is non-virtual, there is no deleting
- variant. Instead, we must explicitly call the appropriate
- `operator delete' here. */
- else if (!DECL_VIRTUAL_P (CLASSTYPE_DESTRUCTOR (type))
- && auto_delete == sfk_deleting_destructor)
- {
- /* We will use ADDR multiple times so we must save it. */
- addr = save_expr (addr);
- /* Build the call. */
- do_delete = build_op_delete_call (DELETE_EXPR,
- addr,
- cxx_sizeof_nowarn (type),
- /*global_p=*/false,
- /*placement=*/NULL_TREE,
- /*alloc_fn=*/NULL_TREE,
- complain);
- /* Call the complete object destructor. */
- auto_delete = sfk_complete_destructor;
- }
- else if (auto_delete == sfk_deleting_destructor
- && TYPE_GETS_REG_DELETE (type))
- {
- /* Make sure we have access to the member op delete, even though
- we'll actually be calling it from the destructor. */
- build_op_delete_call (DELETE_EXPR, addr, cxx_sizeof_nowarn (type),
- /*global_p=*/false,
- /*placement=*/NULL_TREE,
- /*alloc_fn=*/NULL_TREE,
- complain);
- }
+ tree head = NULL_TREE;
+ tree do_delete = NULL_TREE;
+ bool destroying_delete = false;
- expr = build_dtor_call (cp_build_fold_indirect_ref (addr),
- auto_delete, flags, complain);
- if (expr == error_mark_node)
- return error_mark_node;
- if (do_delete)
- /* The delete operator must be called, regardless of whether
- the destructor throws.
-
- [expr.delete]/7 The deallocation function is called
- regardless of whether the destructor for the object or some
- element of the array throws an exception. */
- expr = build2 (TRY_FINALLY_EXPR, void_type_node, expr, do_delete);
-
- /* We need to calculate this before the dtor changes the vptr. */
- if (head)
- expr = build2 (COMPOUND_EXPR, void_type_node, head, expr);
-
- if (flags & LOOKUP_DESTRUCTOR)
- /* Explicit destructor call; don't check for null pointer. */
- ifexp = integer_one_node;
- else
+ if (!deleting)
+ {
+ /* Leave do_delete null. */
+ }
+ /* For `::delete x', we must not use the deleting destructor
+ since then we would not be sure to get the global `operator
+ delete'. */
+ else if (use_global_delete)
+ {
+ head = get_target_expr (build_headof (addr));
+ /* Delete the object. */
+ do_delete = build_op_delete_call (DELETE_EXPR,
+ head,
+ cxx_sizeof_nowarn (type),
+ /*global_p=*/true,
+ /*placement=*/NULL_TREE,
+ /*alloc_fn=*/NULL_TREE,
+ complain);
+ /* Otherwise, treat this like a complete object destructor
+ call. */
+ auto_delete = sfk_complete_destructor;
+ }
+ /* If the destructor is non-virtual, there is no deleting
+ variant. Instead, we must explicitly call the appropriate
+ `operator delete' here. */
+ else if (!virtual_p)
+ {
+ /* Build the call. */
+ do_delete = build_op_delete_call (DELETE_EXPR,
+ addr,
+ cxx_sizeof_nowarn (type),
+ /*global_p=*/false,
+ /*placement=*/NULL_TREE,
+ /*alloc_fn=*/NULL_TREE,
+ complain);
+ /* Call the complete object destructor. */
+ auto_delete = sfk_complete_destructor;
+ if (do_delete != error_mark_node)
{
- /* Handle deleting a null pointer. */
- warning_sentinel s (warn_address);
- ifexp = cp_build_binary_op (input_location, NE_EXPR, addr,
- nullptr_node, complain);
- if (ifexp == error_mark_node)
- return error_mark_node;
- /* This is a compiler generated comparison, don't emit
- e.g. -Wnonnull-compare warning for it. */
- else if (TREE_CODE (ifexp) == NE_EXPR)
- TREE_NO_WARNING (ifexp) = 1;
+ tree fn = get_callee_fndecl (do_delete);
+ destroying_delete = destroying_delete_p (fn);
}
+ }
+ else if (TYPE_GETS_REG_DELETE (type))
+ {
+ /* Make sure we have access to the member op delete, even though
+ we'll actually be calling it from the destructor. */
+ build_op_delete_call (DELETE_EXPR, addr, cxx_sizeof_nowarn (type),
+ /*global_p=*/false,
+ /*placement=*/NULL_TREE,
+ /*alloc_fn=*/NULL_TREE,
+ complain);
+ }
- if (ifexp != integer_one_node)
- expr = build3 (COND_EXPR, void_type_node, ifexp, expr, void_node);
+ if (!destroying_delete && type_build_dtor_call (type))
+ expr = build_dtor_call (cp_build_fold_indirect_ref (addr),
+ auto_delete, flags, complain);
+ else
+ expr = build_trivial_dtor_call (addr);
+ if (expr == error_mark_node)
+ return error_mark_node;
+ if (!deleting)
+ {
+ protected_set_expr_location (expr, loc);
return expr;
}
+
+ if (do_delete == error_mark_node)
+ return error_mark_node;
+ else if (do_delete)
+ {
+ tree do_delete_call_expr = extract_call_expr (do_delete);
+ if (TREE_CODE (do_delete_call_expr) == CALL_EXPR)
+ CALL_FROM_NEW_OR_DELETE_P (do_delete_call_expr) = 1;
+ }
+
+ if (do_delete && !TREE_SIDE_EFFECTS (expr))
+ expr = do_delete;
+ else if (do_delete)
+ /* The delete operator must be called, regardless of whether
+ the destructor throws.
+
+ [expr.delete]/7 The deallocation function is called
+ regardless of whether the destructor for the object or some
+ element of the array throws an exception. */
+ expr = build2 (TRY_FINALLY_EXPR, void_type_node, expr, do_delete);
+
+ /* We need to calculate this before the dtor changes the vptr. */
+ if (head)
+ expr = build2 (COMPOUND_EXPR, void_type_node, head, expr);
+
+ /* Handle deleting a null pointer. */
+ warning_sentinel s (warn_address);
+ tree ifexp = cp_build_binary_op (loc, NE_EXPR, addr,
+ nullptr_node, complain);
+ ifexp = cp_fully_fold (ifexp);
+
+ if (ifexp == error_mark_node)
+ return error_mark_node;
+ /* This is a compiler generated comparison, don't emit
+ e.g. -Wnonnull-compare warning for it. */
+ else if (TREE_CODE (ifexp) == NE_EXPR)
+ TREE_NO_WARNING (ifexp) = 1;
+
+ if (!integer_nonzerop (ifexp))
+ expr = build3 (COND_EXPR, void_type_node, ifexp, expr, void_node);
+
+ protected_set_expr_location (expr, loc);
+ return expr;
}
/* At the beginning of a destructor, push cleanups that will call the
/*access_path=*/NULL_TREE,
/*preserve_reference=*/false,
tf_warning_or_error));
- expr = build_delete (this_type, this_member,
+ expr = build_delete (input_location, this_type, this_member,
sfk_complete_destructor,
LOOKUP_NONVIRTUAL|LOOKUP_DESTRUCTOR|LOOKUP_NORMAL,
0, tf_warning_or_error);
be worth bothering.) */
tree
-build_vec_delete (tree base, tree maxindex,
+build_vec_delete (location_t loc, tree base, tree maxindex,
special_function_kind auto_delete_vec,
int use_global_delete, tsubst_flags_t complain)
{
base = TARGET_EXPR_SLOT (base_init);
}
type = strip_array_types (TREE_TYPE (type));
- cookie_addr = fold_build1_loc (input_location, NEGATE_EXPR,
+ cookie_addr = fold_build1_loc (loc, NEGATE_EXPR,
sizetype, TYPE_SIZE_UNIT (sizetype));
cookie_addr = fold_build_pointer_plus (fold_convert (size_ptr_type, base),
cookie_addr);
else
{
if (base != error_mark_node && !(complain & tf_error))
- error ("type to vector delete is neither pointer or array type");
+ error_at (loc,
+ "type to vector delete is neither pointer or array type");
return error_mark_node;
}
- rval = build_vec_delete_1 (base, maxindex, type, auto_delete_vec,
+ rval = build_vec_delete_1 (loc, base, maxindex, type, auto_delete_vec,
use_global_delete, complain);
if (base_init && rval != error_mark_node)
rval = build2 (COMPOUND_EXPR, TREE_TYPE (rval), base_init, rval);
+ protected_set_expr_location (rval, loc);
return rval;
}