/* Handle initialization things in C++.
- Copyright (C) 1987-2017 Free Software Foundation, Inc.
+ Copyright (C) 1987-2019 Free Software Foundation, Inc.
Contributed by Michael Tiemann (tiemann@cygnus.com)
This file is part of GCC.
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
+#include "stor-layout.h"
static bool begin_init_stmts (tree *, tree *);
static tree finish_init_stmts (bool, tree, tree);
;
else if (TYPE_PTR_OR_PTRMEM_P (type))
init = fold (convert (type, nullptr_node));
+ else if (NULLPTR_TYPE_P (type))
+ init = build_int_cst (type, 0);
else if (SCALAR_TYPE_P (type))
init = fold (convert (type, integer_zero_node));
else if (RECORD_OR_UNION_CODE_P (TREE_CODE (type)))
else if (VECTOR_TYPE_P (type))
init = build_zero_cst (type);
else
- gcc_assert (TREE_CODE (type) == REFERENCE_TYPE);
+ {
+ gcc_assert (TYPE_REF_P (type));
+ init = build_zero_cst (type);
+ }
/* In all cases, the initializer is a constant. */
if (init)
gcc_assert (!processing_template_decl
|| (SCALAR_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE));
- if (CLASS_TYPE_P (type)
- && type_build_ctor_call (type))
+ if (CLASS_TYPE_P (type) && type_build_ctor_call (type))
{
- tree ctor =
- build_special_member_call (NULL_TREE, complete_ctor_identifier,
- NULL, type, LOOKUP_NORMAL,
- complain);
- if (ctor == error_mark_node)
+ tree ctor
+ = build_special_member_call (NULL_TREE, complete_ctor_identifier,
+ NULL, type, LOOKUP_NORMAL, complain);
+ if (ctor == error_mark_node || TREE_CONSTANT (ctor))
return ctor;
tree fn = NULL_TREE;
if (TREE_CODE (ctor) == CALL_EXPR)
if (ftype == error_mark_node)
continue;
+ /* Ignore flexible array members for value initialization. */
+ if (TREE_CODE (ftype) == ARRAY_TYPE
+ && !COMPLETE_TYPE_P (ftype)
+ && !TYPE_DOMAIN (ftype)
+ && COMPLETE_TYPE_P (TREE_TYPE (ftype))
+ && (next_initializable_field (DECL_CHAIN (field))
+ == NULL_TREE))
+ continue;
+
/* We could skip vfields and fields of types with
user-defined constructors, but I think that won't improve
performance at all; it should be simpler in general just
error ("value-initialization of function type %qT", type);
return error_mark_node;
}
- else if (TREE_CODE (type) == REFERENCE_TYPE)
+ else if (TYPE_REF_P (type))
{
if (complain & tf_error)
error ("value-initialization of reference type %qT", type);
/* Return the non-static data initializer for FIELD_DECL MEMBER. */
+static GTY((cache)) decl_tree_cache_map *nsdmi_inst;
+
tree
get_nsdmi (tree member, bool in_ctor, tsubst_flags_t complain)
{
tree save_ccp = current_class_ptr;
tree save_ccr = current_class_ref;
- if (!in_ctor)
- {
- /* Use a PLACEHOLDER_EXPR when we don't have a 'this' parameter to
- refer to; constexpr evaluation knows what to do with it. */
- current_class_ref = build0 (PLACEHOLDER_EXPR, DECL_CONTEXT (member));
- current_class_ptr = build_address (current_class_ref);
- }
-
if (DECL_LANG_SPECIFIC (member) && DECL_TEMPLATE_INFO (member))
{
init = DECL_INITIAL (DECL_TI_TEMPLATE (member));
- if (TREE_CODE (init) == DEFAULT_ARG)
+ location_t expr_loc
+ = cp_expr_loc_or_loc (init, DECL_SOURCE_LOCATION (member));
+ if (TREE_CODE (init) == DEFERRED_PARSE)
/* Unparsed. */;
+ else if (tree *slot = hash_map_safe_get (nsdmi_inst, member))
+ init = *slot;
/* Check recursive instantiation. */
else if (DECL_INSTANTIATING_NSDMI_P (member))
{
if (complain & tf_error)
- error ("recursive instantiation of default member "
- "initializer for %qD", member);
+ error_at (expr_loc, "recursive instantiation of default member "
+ "initializer for %qD", member);
init = error_mark_node;
}
else
{
+ cp_evaluated ev;
+
+ location_t sloc = input_location;
+ input_location = expr_loc;
+
DECL_INSTANTIATING_NSDMI_P (member) = 1;
+ bool pushed = false;
+ if (!currently_open_class (DECL_CONTEXT (member)))
+ {
+ push_to_top_level ();
+ push_nested_class (DECL_CONTEXT (member));
+ pushed = true;
+ }
+
+ gcc_checking_assert (!processing_template_decl);
+
+ inject_this_parameter (DECL_CONTEXT (member), TYPE_UNQUALIFIED);
+
+ start_lambda_scope (member);
+
/* Do deferred instantiation of the NSDMI. */
init = (tsubst_copy_and_build
(init, DECL_TI_ARGS (member),
complain, member, /*function_p=*/false,
/*integral_constant_expression_p=*/false));
init = digest_nsdmi_init (member, init, complain);
-
+
+ finish_lambda_scope ();
+
DECL_INSTANTIATING_NSDMI_P (member) = 0;
+
+ if (init != error_mark_node)
+ hash_map_safe_put<hm_ggc> (nsdmi_inst, member, init);
+
+ if (pushed)
+ {
+ pop_nested_class ();
+ pop_from_top_level ();
+ }
+
+ input_location = sloc;
}
}
else
init = DECL_INITIAL (member);
- if (init && TREE_CODE (init) == DEFAULT_ARG)
+ if (init && TREE_CODE (init) == DEFERRED_PARSE)
{
if (complain & tf_error)
{
init = error_mark_node;
}
+ if (in_ctor)
+ {
+ current_class_ptr = save_ccp;
+ current_class_ref = save_ccr;
+ }
+ else
+ {
+ /* Use a PLACEHOLDER_EXPR when we don't have a 'this' parameter to
+ refer to; constexpr evaluation knows what to do with it. */
+ current_class_ref = build0 (PLACEHOLDER_EXPR, DECL_CONTEXT (member));
+ current_class_ptr = build_address (current_class_ref);
+ }
+
/* Strip redundant TARGET_EXPR so we don't need to remap it, and
so the aggregate init code below will see a CONSTRUCTOR. */
bool simple_target = (init && SIMPLE_TARGET_EXPR_P (init));
if (simple_target)
init = TARGET_EXPR_INITIAL (init);
- init = break_out_target_exprs (init);
+ init = break_out_target_exprs (init, /*loc*/true);
if (simple_target && TREE_CODE (init) != CONSTRUCTOR)
/* Now put it back so C++17 copy elision works. */
init = get_target_expr (init);
return true;
}
+/* If INIT's value can come from a call to std::initializer_list<T>::begin,
+ return that function. Otherwise, NULL_TREE. Used by maybe_warn_list_ctor
+ to detect a pointer member being initialized from the backing array of a
+ std::initializer_list, whose lifetime ends with the list. */
+
+static tree
+find_list_begin (tree init)
+{
+ STRIP_NOPS (init);
+ /* Only the value of the last operand of a comma expression matters. */
+ while (TREE_CODE (init) == COMPOUND_EXPR)
+ init = TREE_OPERAND (init, 1);
+ STRIP_NOPS (init);
+ if (TREE_CODE (init) == COND_EXPR)
+ {
+ /* Either arm of a conditional could supply the value; check both.
+ NOTE(review): operand 1 is null for the GNU "x ?: y" extension, in
+ which case the condition (operand 0) itself is the value. */
+ tree left = TREE_OPERAND (init, 1);
+ if (!left)
+ left = TREE_OPERAND (init, 0);
+ left = find_list_begin (left);
+ if (left)
+ return left;
+ return find_list_begin (TREE_OPERAND (init, 2));
+ }
+ /* A direct call to a member named "begin" of std::initializer_list. */
+ if (TREE_CODE (init) == CALL_EXPR)
+ if (tree fn = get_callee_fndecl (init))
+ if (id_equal (DECL_NAME (fn), "begin")
+ && is_std_init_list (DECL_CONTEXT (fn)))
+ return fn;
+ return NULL_TREE;
+}
+
+/* If INIT initializing MEMBER is copying the address of the underlying array
+ of an initializer_list, warn. MEMBER is a FIELD_DECL; INIT is its
+ mem-initializer expression. Only fires inside an initializer-list
+ constructor (is_list_ctor) when MEMBER is a pointer whose pointee type
+ matches the list's element type; emits -Winit-list-lifetime. */
+
+static void
+maybe_warn_list_ctor (tree member, tree init)
+{
+ tree memtype = TREE_TYPE (member);
+ /* Bail early unless a pointer member is being set inside a list ctor. */
+ if (!init || !TYPE_PTR_P (memtype)
+ || !is_list_ctor (current_function_decl))
+ return;
+
+ /* Recover the element type T from the ctor's initializer_list<T> param. */
+ tree parms = FUNCTION_FIRST_USER_PARMTYPE (current_function_decl);
+ tree initlist = non_reference (TREE_VALUE (parms));
+ tree targs = CLASSTYPE_TI_ARGS (initlist);
+ tree elttype = TREE_VEC_ELT (targs, 0);
+
+ /* Only warn when the member points at the list's element type; storing
+ anything else is not capturing the backing array. */
+ if (!same_type_ignoring_top_level_qualifiers_p
+ (TREE_TYPE (memtype), elttype))
+ return;
+
+ /* Does the initializer's value come from initializer_list::begin? */
+ tree begin = find_list_begin (init);
+ if (!begin)
+ return;
+
+ location_t loc = cp_expr_loc_or_input_loc (init);
+ warning_at (loc, OPT_Winit_list_lifetime,
+ "initializing %qD from %qE does not extend the lifetime "
+ "of the underlying array", member, begin);
+}
+
/* Initialize MEMBER, a FIELD_DECL, with INIT, a TREE_LIST of
arguments. If TREE_LIST is void_type_node, an empty initializer
list was given; if NULL_TREE no initializer was given. */
}
}
else if (init
- && (TREE_CODE (type) == REFERENCE_TYPE
+ && (TYPE_REF_P (type)
/* Pre-digested NSDMI. */
|| (((TREE_CODE (init) == CONSTRUCTOR
&& TREE_TYPE (init) == type)
reference member in a constructor’s ctor-initializer (12.6.2)
persists until the constructor exits." */
unsigned i; tree t;
- vec<tree, va_gc> *cleanups = make_tree_vector ();
+ releasing_vec cleanups;
if (TREE_CODE (init) == TREE_LIST)
init = build_x_compound_expr_from_list (init, ELK_MEM_INIT,
tf_warning_or_error);
finish_expr_stmt (init);
FOR_EACH_VEC_ELT (*cleanups, i, t)
push_cleanup (decl, t, false);
- release_tree_vector (cleanups);
}
else if (type_build_ctor_call (type)
|| (init && CLASS_TYPE_P (strip_array_types (type))))
{
/* TYPE_NEEDS_CONSTRUCTING can be set just because we have a
vtable; still give this diagnostic. */
+ auto_diagnostic_group d;
if (permerror (DECL_SOURCE_LOCATION (current_function_decl),
"uninitialized const member in %q#T", type))
inform (DECL_SOURCE_LOCATION (member),
{
tree core_type;
/* member traversal: note it leaves init NULL */
- if (TREE_CODE (type) == REFERENCE_TYPE)
+ if (TYPE_REF_P (type))
{
+ auto_diagnostic_group d;
if (permerror (DECL_SOURCE_LOCATION (current_function_decl),
"uninitialized reference member in %q#T", type))
inform (DECL_SOURCE_LOCATION (member),
}
else if (CP_TYPE_CONST_P (type))
{
+ auto_diagnostic_group d;
if (permerror (DECL_SOURCE_LOCATION (current_function_decl),
"uninitialized const member in %q#T", type))
inform (DECL_SOURCE_LOCATION (member),
init = build_x_compound_expr_from_list (init, ELK_MEM_INIT,
tf_warning_or_error);
+ maybe_warn_list_ctor (member, init);
+
/* Reject a member initializer for a flexible array member. */
if (init && !maybe_reject_flexarray_init (member, init))
finish_expr_stmt (cp_build_modify_expr (input_location, decl,
base_addr = build_base_path (PLUS_EXPR, current_class_ptr,
subobject, 1, tf_warning_or_error);
expand_aggr_init_1 (subobject, NULL_TREE,
- cp_build_indirect_ref (base_addr, RO_NULL,
- tf_warning_or_error),
+ cp_build_fold_indirect_ref (base_addr),
arguments,
flags,
tf_warning_or_error);
/* Compute the value to use, when there's a VTT. */
vtt_parm = current_vtt_parm;
vtbl2 = fold_build_pointer_plus (vtt_parm, vtt_index);
- vtbl2 = cp_build_indirect_ref (vtbl2, RO_NULL, tf_warning_or_error);
+ vtbl2 = cp_build_fold_indirect_ref (vtbl2);
vtbl2 = convert (TREE_TYPE (vtbl), vtbl2);
/* The actual initializer is the VTT value only in the subobject
}
/* Compute the location of the vtpr. */
- vtbl_ptr = build_vfield_ref (cp_build_indirect_ref (decl, RO_NULL,
- tf_warning_or_error),
+ vtbl_ptr = build_vfield_ref (cp_build_fold_indirect_ref (decl),
TREE_TYPE (binfo));
gcc_assert (vtbl_ptr != error_mark_node);
return error_mark_node;
location_t init_loc = (init
- ? EXPR_LOC_OR_LOC (init, input_location)
+ ? cp_expr_loc_or_input_loc (init)
: location_of (exp));
TREE_READONLY (exp) = 0;
if (VAR_P (exp) && DECL_DECOMPOSITION_P (exp))
{
from_array = 1;
- if (init && DECL_P (init)
+ init = mark_rvalue_use (init);
+ if (init
+ && DECL_P (tree_strip_any_location_wrapper (init))
&& !(flags & LOOKUP_ONLYCONVERTING))
{
/* Wrap the initializer in a CONSTRUCTOR so that build_vec_init
}
else
{
- /* An array may not be initialized use the parenthesized
- initialization form -- unless the initializer is "()". */
- if (init && TREE_CODE (init) == TREE_LIST)
- {
- if (complain & tf_error)
- error ("bad array initializer");
- return error_mark_node;
- }
/* Must arrange to initialize each element of EXP
from elements of INIT. */
if (cv_qualified_p (type))
from_array = (itype && same_type_p (TREE_TYPE (init),
TREE_TYPE (exp)));
- if (init && !from_array
- && !BRACE_ENCLOSED_INITIALIZER_P (init))
+ if (init && !BRACE_ENCLOSED_INITIALIZER_P (init)
+ && (!from_array
+ || (TREE_CODE (init) != CONSTRUCTOR
+ /* Can happen, eg, handling the compound-literals
+ extension (ext/complit12.C). */
+ && TREE_CODE (init) != TARGET_EXPR)))
{
if (complain & tf_error)
- permerror (init_loc, "array must be initialized "
- "with a brace-enclosed initializer");
- else
- return error_mark_node;
+ error_at (init_loc, "array must be initialized "
+ "with a brace-enclosed initializer");
+ return error_mark_node;
}
}
&& !DIRECT_LIST_INIT_P (init))
flags |= LOOKUP_ONLYCONVERTING;
- if ((VAR_P (exp) || TREE_CODE (exp) == PARM_DECL)
- && !lookup_attribute ("warn_unused", TYPE_ATTRIBUTES (type)))
- /* Just know that we've seen something for this node. */
- TREE_USED (exp) = 1;
-
is_global = begin_init_stmts (&stmt_expr, &compound_stmt);
destroy_temps = stmts_are_full_exprs_p ();
current_stmt_tree ()->stmts_are_full_exprs_p = 0;
TREE_READONLY (exp) = was_const;
TREE_THIS_VOLATILE (exp) = was_volatile;
+ if ((VAR_P (exp) || TREE_CODE (exp) == PARM_DECL)
+ && TREE_SIDE_EFFECTS (stmt_expr)
+ && !lookup_attribute ("warn_unused", TYPE_ATTRIBUTES (type)))
+ /* Just know that we've seen something for this node. */
+ TREE_USED (exp) = 1;
+
return stmt_expr;
}
tree elt; unsigned i;
/* Unshare the arguments for the second call. */
- vec<tree, va_gc> *parms2 = make_tree_vector ();
+ releasing_vec parms2;
FOR_EACH_VEC_SAFE_ELT (parms, i, elt)
{
elt = break_out_target_exprs (elt);
&parms2, binfo, flags,
complain);
complete = fold_build_cleanup_point_expr (void_type_node, complete);
- release_tree_vector (parms2);
base = build_special_member_call (exp, base_ctor_identifier,
&parms, binfo, flags,
/* If the type has data but no user-provided ctor, we need to zero
out the object. */
if (!type_has_user_provided_constructor (type)
- && !is_really_empty_class (type))
+ && !is_really_empty_class (type, /*ignore_vptr*/true))
{
tree field_size = NULL_TREE;
if (exp != true_exp && CLASSTYPE_AS_BASE (type) != type)
initializer for the static data member is not processed
until needed; we need it now. */
mark_used (decl, tf_none);
- mark_rvalue_use (decl);
init = DECL_INITIAL (decl);
if (init == error_mark_node)
{
|| TREE_CODE (init) == STRING_CST)))
break;
/* Don't return a CONSTRUCTOR for a variable with partial run-time
- initialization, since it doesn't represent the entire value. */
- if (TREE_CODE (init) == CONSTRUCTOR
+ initialization, since it doesn't represent the entire value.
+ Similarly for VECTOR_CSTs created by cp_folding those
+ CONSTRUCTORs. */
+ if ((TREE_CODE (init) == CONSTRUCTOR
+ || TREE_CODE (init) == VECTOR_CST)
&& !DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (decl))
break;
/* If the variable has a dynamic initializer, don't use its
if (type_has_user_provided_constructor (field_type))
continue;
- if (TREE_CODE (field_type) == REFERENCE_TYPE)
+ if (TYPE_REF_P (field_type))
{
++ error_count;
if (complain)
{
tree name = get_identifier ("__cxa_throw_bad_array_new_length");
- fn = IDENTIFIER_GLOBAL_VALUE (name);
+ fn = get_global_binding (name);
if (!fn)
fn = push_throw_library_fn
(name, build_function_type_list (sizetype, NULL_TREE));
return build_cxx_call (fn, 0, NULL, tf_warning_or_error);
}
-/* Attempt to find the initializer for field T in the initializer INIT,
- when non-null. Returns the initializer when successful and NULL
- otherwise. */
+/* Attempt to find the initializer for flexible array field T in the
+ initializer INIT, when non-null. Returns the initializer when
+ successful and NULL otherwise. */
static tree
-find_field_init (tree t, tree init)
+find_flexarray_init (tree t, tree init)
{
- if (!init)
+ if (!init || init == error_mark_node)
return NULL_TREE;
unsigned HOST_WIDE_INT idx;
/* Iterate over all top-level initializer elements. */
FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, field, elt)
- {
- /* If the member T is found, return it. */
- if (field == t)
- return elt;
-
- /* Otherwise continue and/or recurse into nested initializers. */
- if (TREE_CODE (elt) == CONSTRUCTOR
- && (init = find_field_init (t, elt)))
- return init;
- }
+ /* If the member T is found, return it. */
+ if (field == t)
+ return elt;
+
return NULL_TREE;
}
static void
warn_placement_new_too_small (tree type, tree nelts, tree size, tree oper)
{
- location_t loc = EXPR_LOC_OR_LOC (oper, input_location);
+ location_t loc = cp_expr_loc_or_input_loc (oper);
/* The number of bytes to add to or subtract from the size of the provided
buffer based on an offset into an array or an array element reference.
- Although intermediate results may be negative (as in a[3] - 2) the final
- result cannot be. */
- HOST_WIDE_INT adjust = 0;
+ Although intermediate results may be negative (as in a[3] - 2) a valid
+ final result cannot be. */
+ offset_int adjust = 0;
/* True when the size of the entire destination object should be used
to compute the possibly optimistic estimate of the available space. */
bool use_obj_size = false;
is a constant. */
if (TREE_CODE (oper) == POINTER_PLUS_EXPR)
{
- /* If the offset is comple-time constant, use it to compute a more
+ /* If the offset is compile-time constant, use it to compute a more
accurate estimate of the size of the buffer. Since the operand
of POINTER_PLUS_EXPR is represented as an unsigned type, convert
it to signed first.
Otherwise, use the size of the entire array as an optimistic
estimate (this may lead to false negatives). */
tree adj = TREE_OPERAND (oper, 1);
+ adj = fold_for_warn (adj);
if (CONSTANT_CLASS_P (adj))
- adjust += tree_to_shwi (convert (ssizetype, adj));
+ adjust += wi::to_offset (convert (ssizetype, adj));
else
use_obj_size = true;
not a compile-time constant, use the index to determine the
size of the buffer. Otherwise, use the entire array as
an optimistic estimate of the size. */
- const_tree adj = TREE_OPERAND (oper, 1);
+ const_tree adj = fold_non_dependent_expr (TREE_OPERAND (oper, 1));
if (!use_obj_size && CONSTANT_CLASS_P (adj))
- adjust += tree_to_shwi (adj);
+ adjust += wi::to_offset (adj);
else
{
use_obj_size = true;
members from arrays of unspecified size. */
bool compref = TREE_CODE (oper) == COMPONENT_REF;
+ /* For COMPONENT_REF (i.e., a struct member) the size of the entire
+ enclosing struct. Used to validate the adjustment (offset) into
+ an array at the end of a struct. */
+ offset_int compsize = 0;
+
/* Descend into a struct or union to find the member whose address
is being used as the argument. */
if (TREE_CODE (oper) == COMPONENT_REF)
{
+ tree comptype = TREE_TYPE (TREE_OPERAND (oper, 0));
+ compsize = wi::to_offset (TYPE_SIZE_UNIT (comptype));
+
tree op0 = oper;
while (TREE_CODE (op0 = TREE_OPERAND (op0, 0)) == COMPONENT_REF);
+ STRIP_ANY_LOCATION_WRAPPER (op0);
if (VAR_P (op0))
var_decl = op0;
oper = TREE_OPERAND (oper, 1);
}
- if ((addr_expr || !POINTER_TYPE_P (TREE_TYPE (oper)))
+ STRIP_ANY_LOCATION_WRAPPER (oper);
+ tree opertype = TREE_TYPE (oper);
+ if ((addr_expr || !INDIRECT_TYPE_P (opertype))
&& (VAR_P (oper)
|| TREE_CODE (oper) == FIELD_DECL
|| TREE_CODE (oper) == PARM_DECL))
{
/* A possibly optimistic estimate of the number of bytes available
in the destination buffer. */
- unsigned HOST_WIDE_INT bytes_avail = 0;
+ offset_int bytes_avail = 0;
/* True when the estimate above is in fact the exact size
of the destination buffer rather than an estimate. */
bool exact_size = true;
/* Use the size of the entire array object when the expression
refers to a variable or its size depends on an expression
that's not a compile-time constant. */
- bytes_avail = tree_to_uhwi (DECL_SIZE_UNIT (oper));
+ bytes_avail = wi::to_offset (DECL_SIZE_UNIT (oper));
exact_size = !use_obj_size;
}
- else if (TYPE_SIZE_UNIT (TREE_TYPE (oper))
- && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (oper))))
+ else if (tree opersize = TYPE_SIZE_UNIT (opertype))
{
/* Use the size of the type of the destination buffer object
- as the optimistic estimate of the available space in it. */
- bytes_avail = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (oper)));
- }
- else if (var_decl)
- {
- /* Constructing into a buffer provided by the flexible array
- member of a declared object (which is permitted as a G++
- extension). If the array member has been initialized,
- determine its size from the initializer. Otherwise,
- the array size is zero. */
- bytes_avail = 0;
-
- if (tree init = find_field_init (oper, DECL_INITIAL (var_decl)))
- bytes_avail = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (init)));
+ as the optimistic estimate of the available space in it.
+ Use the maximum possible size for zero-size arrays and
+ flexible array members (except of initialized objects
+ thereof). */
+ if (TREE_CODE (opersize) == INTEGER_CST)
+ bytes_avail = wi::to_offset (opersize);
}
- else
+
+ if (bytes_avail == 0)
{
- /* Bail if neither the size of the object nor its type is known. */
- return;
+ if (var_decl)
+ {
+ /* Constructing into a buffer provided by the flexible array
+ member of a declared object (which is permitted as a G++
+ extension). If the array member has been initialized,
+ determine its size from the initializer. Otherwise,
+ the array size is zero. */
+ if (tree init = find_flexarray_init (oper,
+ DECL_INITIAL (var_decl)))
+ bytes_avail = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (init)));
+ }
+ else
+ bytes_avail = (wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node))
+ - compsize);
}
- tree_code oper_code = TREE_CODE (TREE_TYPE (oper));
+ tree_code oper_code = TREE_CODE (opertype);
if (compref && oper_code == ARRAY_TYPE)
{
- /* Avoid diagnosing flexible array members (which are accepted
- as an extension and diagnosed with -Wpedantic) and zero-length
- arrays (also an extension).
- Overflowing construction in one-element arrays is diagnosed
- only at level 2. */
- if (bytes_avail == 0 && !var_decl)
- return;
-
- tree nelts = array_type_nelts_top (TREE_TYPE (oper));
+ tree nelts = array_type_nelts_top (opertype);
tree nelts_cst = maybe_constant_value (nelts);
if (TREE_CODE (nelts_cst) == INTEGER_CST
&& integer_onep (nelts_cst)
return;
}
- /* The size of the buffer can only be adjusted down but not up. */
- gcc_checking_assert (0 <= adjust);
-
/* Reduce the size of the buffer by the adjustment computed above
from the offset and/or the index into the array. */
- if (bytes_avail < static_cast<unsigned HOST_WIDE_INT>(adjust))
+ if (bytes_avail < adjust || adjust < 0)
bytes_avail = 0;
else
- bytes_avail -= adjust;
+ {
+ tree elttype = (TREE_CODE (opertype) == ARRAY_TYPE
+ ? TREE_TYPE (opertype) : opertype);
+ if (tree eltsize = TYPE_SIZE_UNIT (elttype))
+ {
+ bytes_avail -= adjust * wi::to_offset (eltsize);
+ if (bytes_avail < 0)
+ bytes_avail = 0;
+ }
+ }
/* The minimum amount of space needed for the allocation. This
is an optimistic estimate that makes it possible to detect
placement new invocation for some undersize buffers but not
others. */
- unsigned HOST_WIDE_INT bytes_need;
+ offset_int bytes_need;
+
+ if (nelts)
+ nelts = fold_for_warn (nelts);
if (CONSTANT_CLASS_P (size))
- bytes_need = tree_to_uhwi (size);
+ bytes_need = wi::to_offset (size);
else if (nelts && CONSTANT_CLASS_P (nelts))
- bytes_need = tree_to_uhwi (nelts)
- * tree_to_uhwi (TYPE_SIZE_UNIT (type));
+ bytes_need = (wi::to_offset (nelts)
+ * wi::to_offset (TYPE_SIZE_UNIT (type)));
else if (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
- bytes_need = tree_to_uhwi (TYPE_SIZE_UNIT (type));
+ bytes_need = wi::to_offset (TYPE_SIZE_UNIT (type));
else
{
/* The type is a VLA. */
: "placement new constructing an object of type "
"%<%T [%wu]%> and size %qwu in a region of type %qT "
"and size at most %qwu",
- type, tree_to_uhwi (nelts), bytes_need,
- TREE_TYPE (oper),
- bytes_avail);
+ type, tree_to_uhwi (nelts), bytes_need.to_uhwi (),
+ opertype, bytes_avail.to_uhwi ());
else
warning_at (loc, OPT_Wplacement_new_,
exact_size ?
: "placement new constructing an array of objects "
"of type %qT and size %qwu in a region of type %qT "
"and size at most %qwu",
- type, bytes_need, TREE_TYPE (oper),
- bytes_avail);
+ type, bytes_need.to_uhwi (), opertype,
+ bytes_avail.to_uhwi ());
else
warning_at (loc, OPT_Wplacement_new_,
exact_size ?
: "placement new constructing an object of type %qT "
"and size %qwu in a region of type %qT and size "
"at most %qwu",
- type, bytes_need, TREE_TYPE (oper),
- bytes_avail);
+ type, bytes_need.to_uhwi (), opertype,
+ bytes_avail.to_uhwi ());
}
}
}
}
/* Determine whether an allocation function is a namespace-scope
- non-replaceable placement new function. See DR 1748.
- TODO: Enable in all standard modes. */
+ non-replaceable placement new function. See DR 1748. */
static bool
std_placement_new_fn_p (tree alloc_fn)
{
- if ((cxx_dialect > cxx14) && DECL_NAMESPACE_SCOPE_P (alloc_fn))
+ if (DECL_NAMESPACE_SCOPE_P (alloc_fn))
{
tree first_arg = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (alloc_fn)));
if ((TREE_VALUE (first_arg) == ptr_type_node)
return false;
}
+/* For element type ELT_TYPE, return the appropriate type of the heap object
+ containing such element(s). COOKIE_SIZE is NULL or the size of cookie
+ in bytes. FULL_SIZE is NULL if it is unknown how big the heap allocation
+ will be, otherwise size of the heap object. If COOKIE_SIZE is NULL,
+ return array type ELT_TYPE[FULL_SIZE / sizeof(ELT_TYPE)], otherwise return
+ struct { size_t[COOKIE_SIZE/sizeof(size_t)]; ELT_TYPE[N]; }
+ where N is nothing (flexible array member) if FULL_SIZE is NULL, otherwise
+ it is computed such that the size of the struct fits into FULL_SIZE. */
+
+tree
+build_new_constexpr_heap_type (tree elt_type, tree cookie_size, tree full_size)
+{
+ /* Both sizes, when present, must be host-representable constants. */
+ gcc_assert (cookie_size == NULL_TREE || tree_fits_uhwi_p (cookie_size));
+ gcc_assert (full_size == NULL_TREE || tree_fits_uhwi_p (full_size));
+ unsigned HOST_WIDE_INT csz = cookie_size ? tree_to_uhwi (cookie_size) : 0;
+ tree itype2 = NULL_TREE;
+ if (full_size)
+ {
+ /* Element count = (FULL_SIZE - COOKIE_SIZE) / sizeof (ELT_TYPE);
+ build the index type [0 .. count-1] for the element array. */
+ unsigned HOST_WIDE_INT fsz = tree_to_uhwi (full_size);
+ gcc_assert (fsz >= csz);
+ fsz -= csz;
+ fsz /= int_size_in_bytes (elt_type);
+ itype2 = build_index_type (size_int (fsz - 1));
+ if (!cookie_size)
+ return build_cplus_array_type (elt_type, itype2);
+ }
+ else
+ /* With no FULL_SIZE we must at least have a cookie; the element array
+ then becomes a flexible array member (itype2 stays NULL_TREE). */
+ gcc_assert (cookie_size);
+ /* Model the cookie as an array of size_t preceding the elements. */
+ csz /= int_size_in_bytes (sizetype);
+ tree itype1 = build_index_type (size_int (csz - 1));
+ tree atype1 = build_cplus_array_type (sizetype, itype1);
+ tree atype2 = build_cplus_array_type (elt_type, itype2);
+ /* Wrap both arrays in an artificial RECORD_TYPE named "heap" so the
+ constexpr evaluator can recognize the allocation's layout. */
+ tree rtype = cxx_make_type (RECORD_TYPE);
+ TYPE_NAME (rtype) = heap_identifier;
+ tree fld1 = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE, atype1);
+ tree fld2 = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE, atype2);
+ DECL_FIELD_CONTEXT (fld1) = rtype;
+ DECL_FIELD_CONTEXT (fld2) = rtype;
+ DECL_ARTIFICIAL (fld1) = true;
+ DECL_ARTIFICIAL (fld2) = true;
+ TYPE_FIELDS (rtype) = fld1;
+ DECL_CHAIN (fld1) = fld2;
+ layout_type (rtype);
+ return rtype;
+}
+
+/* Help the constexpr code to find the right type for the heap variable
+ by adding a NOP_EXPR around ALLOC_CALL if needed for cookie_size.
+ Return ALLOC_CALL or ALLOC_CALL cast to a pointer to
+ struct { size_t[cookie_size/sizeof(size_t)]; elt_type[]; }.
+ Only relevant for C++20 constexpr new-expressions; a no-op otherwise. */
+
+static tree
+maybe_wrap_new_for_constexpr (tree alloc_call, tree elt_type, tree cookie_size)
+{
+ /* Constant evaluation of new-expressions only exists from C++20 on. */
+ if (cxx_dialect < cxx2a)
+ return alloc_call;
+
+ /* Only worth doing inside a constexpr function (or at namespace scope,
+ when current_function_decl is NULL). */
+ if (current_function_decl != NULL_TREE
+ && !DECL_DECLARED_CONSTEXPR_P (current_function_decl))
+ return alloc_call;
+
+ tree call_expr = extract_call_expr (alloc_call);
+ if (call_expr == error_mark_node)
+ return alloc_call;
+
+ /* Only the global replaceable operator new is usable in constant
+ evaluation; class-specific or non-new callees are left alone. */
+ tree alloc_call_fndecl = cp_get_callee_fndecl_nofold (call_expr);
+ if (alloc_call_fndecl == NULL_TREE
+ || !IDENTIFIER_NEW_OP_P (DECL_NAME (alloc_call_fndecl))
+ || CP_DECL_CONTEXT (alloc_call_fndecl) != global_namespace)
+ return alloc_call;
+
+ /* Cast the allocator's result to "pointer to cookie+flexarray struct"
+ so the constexpr evaluator knows the heap object's layout. */
+ tree rtype = build_new_constexpr_heap_type (elt_type, cookie_size,
+ NULL_TREE);
+ return build_nop (build_pointer_type (rtype), alloc_call);
+}
+
/* Generate code for a new-expression, including calling the "operator
new" function, initializing the object, and, if an exception occurs
during construction, cleaning up. The arguments are as for
outer_nelts_from_type = true;
}
- /* Lots of logic below. depends on whether we have a constant number of
+ /* Lots of logic below depends on whether we have a constant number of
elements, so go ahead and fold it now. */
- if (outer_nelts)
- outer_nelts = maybe_constant_value (outer_nelts);
+ const_tree cst_outer_nelts = fold_non_dependent_expr (outer_nelts, complain);
/* If our base type is an array, then make sure we know how many elements
it has. */
tree inner_nelts_cst = maybe_constant_value (inner_nelts);
if (TREE_CODE (inner_nelts_cst) == INTEGER_CST)
{
- bool overflow;
+ wi::overflow_type overflow;
offset_int result = wi::mul (wi::to_offset (inner_nelts_cst),
inner_nelts_count, SIGNED, &overflow);
if (overflow)
{
if (complain & tf_error)
{
- error_at (EXPR_LOC_OR_LOC (inner_nelts, input_location),
+ error_at (cp_expr_loc_or_input_loc (inner_nelts),
"array size in new-expression must be constant");
cxx_constant_value(inner_nelts);
}
/* Warn if we performed the (T[N]) to T[N] transformation and N is
variable. */
if (outer_nelts_from_type
- && !TREE_CONSTANT (outer_nelts))
+ && !TREE_CONSTANT (cst_outer_nelts))
{
if (complain & tf_warning_or_error)
{
- pedwarn (EXPR_LOC_OR_LOC (outer_nelts, input_location), OPT_Wvla,
+ pedwarn (cp_expr_loc_or_input_loc (outer_nelts), OPT_Wvla,
typedef_variant_p (orig_type)
? G_("non-constant array new length must be specified "
- "directly, not by typedef")
+ "directly, not by %<typedef%>")
: G_("non-constant array new length must be specified "
"without parentheses around the type-id"));
}
if (VOID_TYPE_P (elt_type))
{
if (complain & tf_error)
- error ("invalid type %<void%> for new");
+ error ("invalid type %<void%> for %<new%>");
return error_mark_node;
}
+ if (is_std_init_list (elt_type))
+ warning (OPT_Winit_list_lifetime,
+ "%<new%> of %<initializer_list%> does not "
+ "extend the lifetime of the underlying array");
+
if (abstract_virtuals_error_sfinae (ACU_NEW, elt_type, complain))
return error_mark_node;
maximum object size and is safe even if we choose not to use
a cookie after all. */
max_size -= wi::to_offset (cookie_size);
- bool overflow;
+ wi::overflow_type overflow;
inner_size = wi::mul (wi::to_offset (size), inner_nelts_count, SIGNED,
&overflow);
if (overflow || wi::gtu_p (inner_size, max_size))
{
if (complain & tf_error)
- error ("size of array is too large");
+ {
+ cst_size_error error;
+ if (overflow)
+ error = cst_size_overflow;
+ else
+ {
+ error = cst_size_too_big;
+ size = size_binop (MULT_EXPR, size,
+ wide_int_to_tree (sizetype,
+ inner_nelts_count));
+ size = cp_fully_fold (size);
+ }
+ invalid_array_size_error (input_location, error, size,
+ /*name=*/NULL_TREE);
+ }
return error_mark_node;
}
size = size_binop (MULT_EXPR, size, fold_convert (sizetype, nelts));
- if (INTEGER_CST == TREE_CODE (outer_nelts))
+ if (TREE_CODE (cst_outer_nelts) == INTEGER_CST)
{
- if (tree_int_cst_lt (max_outer_nelts_tree, outer_nelts))
+ if (tree_int_cst_lt (max_outer_nelts_tree, cst_outer_nelts))
{
/* When the array size is constant, check it at compile time
to make sure it doesn't exceed the implementation-defined
isn't explicitly stated but it's enforced anyway -- see
grokdeclarator in cp/decl.c). */
if (complain & tf_error)
- error ("size of array is too large");
+ {
+ size = cp_fully_fold (size);
+ invalid_array_size_error (input_location, cst_size_too_big,
+ size, NULL_TREE);
+ }
return error_mark_node;
}
}
tree fnname;
tree fns;
- fnname = cp_operator_id (array_p ? VEC_NEW_EXPR : NEW_EXPR);
+ fnname = ovl_op_identifier (false, array_p ? VEC_NEW_EXPR : NEW_EXPR);
member_new_p = !globally_qualified_p
&& CLASS_TYPE_P (elt_type)
|| CP_DECL_CONTEXT (alloc_fn) == global_namespace)
&& !aligned_allocation_fn_p (alloc_fn))
{
+ auto_diagnostic_group d;
if (warning (OPT_Waligned_new_, "%<new%> of type %qT with extended "
"alignment %d", elt_type, TYPE_ALIGN_UNIT (elt_type)))
{
}
}
+ tree alloc_call_expr = extract_call_expr (alloc_call);
+ if (TREE_CODE (alloc_call_expr) == CALL_EXPR)
+ CALL_FROM_NEW_OR_DELETE_P (alloc_call_expr) = 1;
+
+ if (cookie_size)
+ alloc_call = maybe_wrap_new_for_constexpr (alloc_call, elt_type,
+ cookie_size);
+
/* In the simple case, we can stop now. */
pointer_type = build_pointer_type (type);
if (!cookie_size && !is_initialized)
alloc_node, cookie_ptr);
size_ptr_type = build_pointer_type (sizetype);
cookie_ptr = fold_convert (size_ptr_type, cookie_ptr);
- cookie = cp_build_indirect_ref (cookie_ptr, RO_NULL, complain);
+ cookie = cp_build_fold_indirect_ref (cookie_ptr);
cookie_expr = build2 (MODIFY_EXPR, sizetype, cookie, nelts);
NEGATE_EXPR, sizetype,
size_in_bytes (sizetype)));
- cookie = cp_build_indirect_ref (cookie_ptr, RO_NULL, complain);
+ cookie = cp_build_fold_indirect_ref (cookie_ptr);
cookie = build2 (MODIFY_EXPR, sizetype, cookie,
size_in_bytes (elt_type));
cookie_expr = build2 (COMPOUND_EXPR, TREE_TYPE (cookie_expr),
the initializer anyway since we're going to throw it away and
rebuild it at instantiation time, so just build up a single
constructor call to get any appropriate diagnostics. */
- init_expr = cp_build_indirect_ref (data_addr, RO_NULL, complain);
+ init_expr = cp_build_fold_indirect_ref (data_addr);
if (type_build_ctor_call (elt_type))
init_expr = build_special_member_call (init_expr,
complete_ctor_identifier,
else if (*init)
{
if (complain & tf_error)
- permerror (input_location,
- "parenthesized initializer in array new");
- else
- return error_mark_node;
- vecinit = build_tree_list_vec (*init);
+ error ("parenthesized initializer in array new");
+ return error_mark_node;
}
init_expr
= build_vec_init (data_addr,
}
else
{
- init_expr = cp_build_indirect_ref (data_addr, RO_NULL, complain);
+ init_expr = cp_build_fold_indirect_ref (data_addr);
if (type_build_ctor_call (type) && !explicit_value_init_p)
{
if (auto_node)
{
tree d_init = NULL_TREE;
- if (vec_safe_length (*init) == 1)
+ const size_t len = vec_safe_length (*init);
+ /* E.g. new auto(x) must have exactly one element, or
+ a {} initializer will have one element. */
+ if (len == 1)
{
d_init = (**init)[0];
d_init = resolve_nondeduced_context (d_init, complain);
}
- type = do_auto_deduction (type, d_init, auto_node);
+ /* For the rest, e.g. new A(1, 2, 3), create a list. */
+ else if (len > 1)
+ {
+ unsigned int n;
+ tree t;
+ tree *pp = &d_init;
+ FOR_EACH_VEC_ELT (**init, n, t)
+ {
+ t = resolve_nondeduced_context (t, complain);
+ *pp = build_tree_list (NULL_TREE, t);
+ pp = &TREE_CHAIN (*pp);
+ }
+ }
+ type = do_auto_deduction (type, d_init, auto_node, complain);
}
}
if (!build_expr_type_conversion (WANT_INT | WANT_ENUM, nelts, false))
{
if (complain & tf_error)
- permerror (input_location, "size in array new must have integral type");
+ permerror (cp_expr_loc_or_input_loc (nelts),
+ "size in array new must have integral type");
else
return error_mark_node;
}
/* Try to determine the constant value only for the purposes
of the diagnostic below but continue to use the original
value and handle const folding later. */
- const_tree cst_nelts = maybe_constant_value (nelts);
+ const_tree cst_nelts = fold_non_dependent_expr (nelts, complain);
/* The expression in a noptr-new-declarator is erroneous if it's of
non-class type and its value before converting to std::size_t is
less than zero. ... If the expression is a constant expression,
   the program is ill-formed. */
- if (INTEGER_CST == TREE_CODE (cst_nelts)
- && tree_int_cst_sgn (cst_nelts) == -1)
- {
- if (complain & tf_error)
- error ("size of array is negative");
- return error_mark_node;
- }
+ if (TREE_CODE (cst_nelts) == INTEGER_CST
+ && !valid_array_size_p (cp_expr_loc_or_input_loc (nelts),
+ cst_nelts, NULL_TREE,
+ complain & tf_error))
+ return error_mark_node;
nelts = mark_rvalue_use (nelts);
nelts = cp_save_expr (cp_convert (sizetype, nelts, complain));
/* ``A reference cannot be created by the new operator. A reference
is not an object (8.2.2, 8.4.3), so a pointer to it could not be
returned by new.'' ARM 5.3.3 */
- if (TREE_CODE (type) == REFERENCE_TYPE)
+ if (TYPE_REF_P (type))
{
if (complain & tf_error)
error ("new cannot be applied to a reference type");
if (!COMPLETE_TYPE_P (type))
{
- if ((complain & tf_warning)
- && warning (OPT_Wdelete_incomplete,
- "possible problem detected in invocation of "
- "delete [] operator:"))
- {
- cxx_incomplete_type_diagnostic (base, type, DK_WARNING);
- inform (input_location, "neither the destructor nor the "
- "class-specific operator delete [] will be called, "
- "even if they are declared when the class is defined");
- }
+ if (complain & tf_warning)
+ {
+ auto_diagnostic_group d;
+ if (warning (OPT_Wdelete_incomplete,
+ "possible problem detected in invocation of "
+ "operator %<delete []%>"))
+ {
+ cxx_incomplete_type_diagnostic (base, type, DK_WARNING);
+ inform (input_location, "neither the destructor nor the "
+ "class-specific operator %<delete []%> will be called, "
+ "even if they are declared when the class is defined");
+ }
+ }
/* This size won't actually be used. */
size_exp = size_one_node;
goto no_destructor;
fold_convert (sizetype, maxindex));
tbase = create_temporary_var (ptype);
- tbase_init
- = cp_build_modify_expr (input_location, tbase, NOP_EXPR,
- fold_build_pointer_plus_loc (input_location,
- fold_convert (ptype,
- base),
- virtual_size),
- complain);
- if (tbase_init == error_mark_node)
- return error_mark_node;
- controller = build3 (BIND_EXPR, void_type_node, tbase,
- NULL_TREE, NULL_TREE);
+ DECL_INITIAL (tbase)
+ = fold_build_pointer_plus_loc (input_location, fold_convert (ptype, base),
+ virtual_size);
+ tbase_init = build_stmt (input_location, DECL_EXPR, tbase);
+ controller = build3 (BIND_EXPR, void_type_node, tbase, NULL_TREE, NULL_TREE);
TREE_SIDE_EFFECTS (controller) = 1;
body = build1 (EXIT_EXPR, void_type_node,
/*placement=*/NULL_TREE,
/*alloc_fn=*/NULL_TREE,
complain);
+
+ tree deallocate_call_expr = extract_call_expr (deallocate_expr);
+ if (TREE_CODE (deallocate_call_expr) == CALL_EXPR)
+ CALL_FROM_NEW_OR_DELETE_P (deallocate_call_expr) = 1;
}
body = loop;
else if (!body)
body = deallocate_expr;
else
- /* The delete operator mist be called, even if a destructor
+ /* The delete operator must be called, even if a destructor
throws. */
body = build2 (TRY_FINALLY_EXPR, void_type_node, body, deallocate_expr);
tree compound_stmt;
int destroy_temps;
tree try_block = NULL_TREE;
- int num_initialized_elts = 0;
+ HOST_WIDE_INT num_initialized_elts = 0;
bool is_global;
tree obase = base;
bool xvalue = false;
bool errors = false;
- location_t loc = (init ? EXPR_LOC_OR_LOC (init, input_location)
+ location_t loc = (init ? cp_expr_loc_or_input_loc (init)
: location_of (base));
if (TREE_CODE (atype) == ARRAY_TYPE && TYPE_DOMAIN (atype))
finish_init_stmt (for_stmt);
finish_for_cond (build2 (GT_EXPR, boolean_type_node, iterator,
build_int_cst (TREE_TYPE (iterator), -1)),
- for_stmt, false);
+ for_stmt, false, 0);
elt_init = cp_build_unary_op (PREDECREMENT_EXPR, iterator, false,
complain);
if (elt_init == error_mark_node)
else
from = NULL_TREE;
- if (from_array == 2)
+ if (TREE_CODE (type) == ARRAY_TYPE)
+ elt_init = build_vec_init (to, NULL_TREE, from, /*val_init*/false,
+ from_array, complain);
+ else if (from_array == 2)
elt_init = cp_build_modify_expr (input_location, to, NOP_EXPR,
from, complain);
else if (type_build_ctor_call (type))
else if (TREE_CODE (type) == ARRAY_TYPE)
{
if (init && !BRACE_ENCLOSED_INITIALIZER_P (init))
- sorry
- ("cannot initialize multi-dimensional array with initializer");
- elt_init = build_vec_init (build1 (INDIRECT_REF, type, base),
- 0, init,
- explicit_value_init_p,
- 0, complain);
+ {
+ if ((complain & tf_error))
+ error_at (loc, "array must be initialized "
+ "with a brace-enclosed initializer");
+ elt_init = error_mark_node;
+ }
+ else
+ elt_init = build_vec_init (build1 (INDIRECT_REF, type, base),
+ 0, init,
+ explicit_value_init_p,
+ 0, complain);
}
else if (explicit_value_init_p)
{
if (TREE_CODE (init) == TREE_LIST)
init = build_x_compound_expr_from_list (init, ELK_INIT,
complain);
- elt_init = build2 (INIT_EXPR, type, to, init);
+ elt_init = (init == error_mark_node
+ ? error_mark_node
+ : build2 (INIT_EXPR, type, to, init));
}
}
if (e)
{
- int max = tree_to_shwi (maxindex)+1;
- for (; num_initialized_elts < max; ++num_initialized_elts)
+ HOST_WIDE_INT last = tree_to_shwi (maxindex);
+ if (num_initialized_elts <= last)
{
tree field = size_int (num_initialized_elts);
+ if (num_initialized_elts != last)
+ field = build2 (RANGE_EXPR, sizetype, field,
+ size_int (last));
CONSTRUCTOR_APPEND_ELT (const_vec, field, e);
}
}
}
current_stmt_tree ()->stmts_are_full_exprs_p = 1;
- if (elt_init)
+ if (elt_init && !errors)
finish_expr_stmt (elt_init);
current_stmt_tree ()->stmts_are_full_exprs_p = 0;
{
atype = build_pointer_type (atype);
stmt_expr = build1 (NOP_EXPR, atype, stmt_expr);
- stmt_expr = cp_build_indirect_ref (stmt_expr, RO_NULL, complain);
+ stmt_expr = cp_build_fold_indirect_ref (stmt_expr);
TREE_NO_WARNING (stmt_expr) = 1;
}
tsubst_flags_t complain)
{
tree name;
- tree fn;
switch (dtor_kind)
{
case sfk_complete_destructor:
default:
gcc_unreachable ();
}
- fn = lookup_fnfields (TREE_TYPE (exp), name, /*protect=*/2);
- return build_new_method_call (exp, fn,
- /*args=*/NULL,
- /*conversion_path=*/NULL_TREE,
- flags,
- /*fn_p=*/NULL,
- complain);
+
+ return build_special_member_call (exp, name,
+ /*args=*/NULL,
+ /*binfo=*/TREE_TYPE (exp),
+ flags,
+ complain);
}
/* Generate a call to a destructor. TYPE is the type to cast ADDR to.
if (type == error_mark_node)
return error_mark_node;
- if (TREE_CODE (type) == POINTER_TYPE)
+ if (TYPE_PTR_P (type))
type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
if (TREE_CODE (type) == ARRAY_TYPE)
auto_delete, use_global_delete, complain);
}
+ bool deleting = (auto_delete == sfk_deleting_destructor);
+ gcc_assert (deleting == !(flags & LOOKUP_DESTRUCTOR));
+
if (TYPE_PTR_P (otype))
{
addr = mark_rvalue_use (addr);
complete_type (type);
if (!COMPLETE_TYPE_P (type))
{
- if ((complain & tf_warning)
- && warning (OPT_Wdelete_incomplete,
- "possible problem detected in invocation of "
- "delete operator:"))
+ if (complain & tf_warning)
{
- cxx_incomplete_type_diagnostic (addr, type, DK_WARNING);
- inform (input_location,
- "neither the destructor nor the class-specific "
- "operator delete will be called, even if they are "
- "declared when the class is defined");
+ auto_diagnostic_group d;
+ if (warning (OPT_Wdelete_incomplete,
+ "possible problem detected in invocation of "
+ "%<operator delete%>"))
+ {
+ cxx_incomplete_type_diagnostic (addr, type, DK_WARNING);
+ inform (input_location,
+ "neither the destructor nor the class-specific "
+ "%<operator delete%> will be called, even if "
+ "they are declared when the class is defined");
+ }
}
}
- else if (auto_delete == sfk_deleting_destructor && warn_delnonvdtor
+ else if (deleting && warn_delnonvdtor
&& MAYBE_CLASS_TYPE_P (type) && !CLASSTYPE_FINAL (type)
&& TYPE_POLYMORPHIC_P (type))
{
}
}
}
- if (TREE_SIDE_EFFECTS (addr))
- addr = save_expr (addr);
/* Throw away const and volatile on target type of addr. */
addr = convert_force (build_pointer_type (type), addr, 0, complain);
addr = cp_build_addr_expr (addr, complain);
if (addr == error_mark_node)
return error_mark_node;
- if (TREE_SIDE_EFFECTS (addr))
- addr = save_expr (addr);
addr = convert_force (build_pointer_type (type), addr, 0, complain);
}
- if (TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
- {
- /* Make sure the destructor is callable. */
- if (type_build_dtor_call (type))
- {
- expr = build_dtor_call (cp_build_indirect_ref (addr, RO_NULL,
- complain),
- sfk_complete_destructor, flags, complain);
- if (expr == error_mark_node)
- return error_mark_node;
- }
-
- if (auto_delete != sfk_deleting_destructor)
- return void_node;
+ if (deleting)
+ /* We will use ADDR multiple times so we must save it. */
+ addr = save_expr (addr);
- return build_op_delete_call (DELETE_EXPR, addr,
- cxx_sizeof_nowarn (type),
- use_global_delete,
- /*placement=*/NULL_TREE,
- /*alloc_fn=*/NULL_TREE,
- complain);
- }
- else
+ bool virtual_p = false;
+ if (type_build_dtor_call (type))
{
- tree head = NULL_TREE;
- tree do_delete = NULL_TREE;
- tree ifexp;
-
if (CLASSTYPE_LAZY_DESTRUCTOR (type))
lazily_declare_fn (sfk_destructor, type);
+ virtual_p = DECL_VIRTUAL_P (CLASSTYPE_DESTRUCTOR (type));
+ }
- /* For `::delete x', we must not use the deleting destructor
- since then we would not be sure to get the global `operator
- delete'. */
- if (use_global_delete && auto_delete == sfk_deleting_destructor)
- {
- /* We will use ADDR multiple times so we must save it. */
- addr = save_expr (addr);
- head = get_target_expr (build_headof (addr));
- /* Delete the object. */
- do_delete = build_op_delete_call (DELETE_EXPR,
- head,
- cxx_sizeof_nowarn (type),
- /*global_p=*/true,
- /*placement=*/NULL_TREE,
- /*alloc_fn=*/NULL_TREE,
- complain);
- /* Otherwise, treat this like a complete object destructor
- call. */
- auto_delete = sfk_complete_destructor;
- }
- /* If the destructor is non-virtual, there is no deleting
- variant. Instead, we must explicitly call the appropriate
- `operator delete' here. */
- else if (!DECL_VIRTUAL_P (CLASSTYPE_DESTRUCTOR (type))
- && auto_delete == sfk_deleting_destructor)
- {
- /* We will use ADDR multiple times so we must save it. */
- addr = save_expr (addr);
- /* Build the call. */
- do_delete = build_op_delete_call (DELETE_EXPR,
- addr,
- cxx_sizeof_nowarn (type),
- /*global_p=*/false,
- /*placement=*/NULL_TREE,
- /*alloc_fn=*/NULL_TREE,
- complain);
- /* Call the complete object destructor. */
- auto_delete = sfk_complete_destructor;
- }
- else if (auto_delete == sfk_deleting_destructor
- && TYPE_GETS_REG_DELETE (type))
- {
- /* Make sure we have access to the member op delete, even though
- we'll actually be calling it from the destructor. */
- build_op_delete_call (DELETE_EXPR, addr, cxx_sizeof_nowarn (type),
- /*global_p=*/false,
- /*placement=*/NULL_TREE,
- /*alloc_fn=*/NULL_TREE,
- complain);
- }
+ tree head = NULL_TREE;
+ tree do_delete = NULL_TREE;
+ bool destroying_delete = false;
- expr = build_dtor_call (cp_build_indirect_ref (addr, RO_NULL, complain),
- auto_delete, flags, complain);
- if (expr == error_mark_node)
- return error_mark_node;
- if (do_delete)
- /* The delete operator must be called, regardless of whether
- the destructor throws.
-
- [expr.delete]/7 The deallocation function is called
- regardless of whether the destructor for the object or some
- element of the array throws an exception. */
- expr = build2 (TRY_FINALLY_EXPR, void_type_node, expr, do_delete);
-
- /* We need to calculate this before the dtor changes the vptr. */
- if (head)
- expr = build2 (COMPOUND_EXPR, void_type_node, head, expr);
-
- if (flags & LOOKUP_DESTRUCTOR)
- /* Explicit destructor call; don't check for null pointer. */
- ifexp = integer_one_node;
- else
+ if (!deleting)
+ {
+ /* Leave do_delete null. */
+ }
+ /* For `::delete x', we must not use the deleting destructor
+ since then we would not be sure to get the global `operator
+ delete'. */
+ else if (use_global_delete)
+ {
+ head = get_target_expr (build_headof (addr));
+ /* Delete the object. */
+ do_delete = build_op_delete_call (DELETE_EXPR,
+ head,
+ cxx_sizeof_nowarn (type),
+ /*global_p=*/true,
+ /*placement=*/NULL_TREE,
+ /*alloc_fn=*/NULL_TREE,
+ complain);
+ /* Otherwise, treat this like a complete object destructor
+ call. */
+ auto_delete = sfk_complete_destructor;
+ }
+ /* If the destructor is non-virtual, there is no deleting
+ variant. Instead, we must explicitly call the appropriate
+ `operator delete' here. */
+ else if (!virtual_p)
+ {
+ /* Build the call. */
+ do_delete = build_op_delete_call (DELETE_EXPR,
+ addr,
+ cxx_sizeof_nowarn (type),
+ /*global_p=*/false,
+ /*placement=*/NULL_TREE,
+ /*alloc_fn=*/NULL_TREE,
+ complain);
+ /* Call the complete object destructor. */
+ auto_delete = sfk_complete_destructor;
+ if (do_delete != error_mark_node)
{
- /* Handle deleting a null pointer. */
- warning_sentinel s (warn_address);
- ifexp = cp_build_binary_op (input_location, NE_EXPR, addr,
- nullptr_node, complain);
- if (ifexp == error_mark_node)
- return error_mark_node;
- /* This is a compiler generated comparison, don't emit
- e.g. -Wnonnull-compare warning for it. */
- else if (TREE_CODE (ifexp) == NE_EXPR)
- TREE_NO_WARNING (ifexp) = 1;
+ tree fn = get_callee_fndecl (do_delete);
+ destroying_delete = destroying_delete_p (fn);
}
+ }
+ else if (TYPE_GETS_REG_DELETE (type))
+ {
+ /* Make sure we have access to the member op delete, even though
+ we'll actually be calling it from the destructor. */
+ build_op_delete_call (DELETE_EXPR, addr, cxx_sizeof_nowarn (type),
+ /*global_p=*/false,
+ /*placement=*/NULL_TREE,
+ /*alloc_fn=*/NULL_TREE,
+ complain);
+ }
- if (ifexp != integer_one_node)
- expr = build3 (COND_EXPR, void_type_node, ifexp, expr, void_node);
+ if (!destroying_delete && type_build_dtor_call (type))
+ expr = build_dtor_call (cp_build_fold_indirect_ref (addr),
+ auto_delete, flags, complain);
+ else
+ expr = build_trivial_dtor_call (addr);
+ if (expr == error_mark_node)
+ return error_mark_node;
- return expr;
+ if (!deleting)
+ return expr;
+
+ if (do_delete)
+ {
+ tree do_delete_call_expr = extract_call_expr (do_delete);
+ if (TREE_CODE (do_delete_call_expr) == CALL_EXPR)
+ CALL_FROM_NEW_OR_DELETE_P (do_delete_call_expr) = 1;
}
+
+ if (do_delete && !TREE_SIDE_EFFECTS (expr))
+ expr = do_delete;
+ else if (do_delete)
+ /* The delete operator must be called, regardless of whether
+ the destructor throws.
+
+ [expr.delete]/7 The deallocation function is called
+ regardless of whether the destructor for the object or some
+ element of the array throws an exception. */
+ expr = build2 (TRY_FINALLY_EXPR, void_type_node, expr, do_delete);
+
+ /* We need to calculate this before the dtor changes the vptr. */
+ if (head)
+ expr = build2 (COMPOUND_EXPR, void_type_node, head, expr);
+
+ /* Handle deleting a null pointer. */
+ warning_sentinel s (warn_address);
+ tree ifexp = cp_build_binary_op (input_location, NE_EXPR, addr,
+ nullptr_node, complain);
+ ifexp = cp_fully_fold (ifexp);
+
+ if (ifexp == error_mark_node)
+ return error_mark_node;
+ /* This is a compiler generated comparison, don't emit
+ e.g. -Wnonnull-compare warning for it. */
+ else if (TREE_CODE (ifexp) == NE_EXPR)
+ TREE_NO_WARNING (ifexp) = 1;
+
+ if (!integer_nonzerop (ifexp))
+ expr = build3 (COND_EXPR, void_type_node, ifexp, expr, void_node);
+
+ return expr;
}
/* At the beginning of a destructor, push cleanups that will call the
sizetype, TYPE_SIZE_UNIT (sizetype));
cookie_addr = fold_build_pointer_plus (fold_convert (size_ptr_type, base),
cookie_addr);
- maxindex = cp_build_indirect_ref (cookie_addr, RO_NULL, complain);
+ maxindex = cp_build_fold_indirect_ref (cookie_addr);
}
else if (TREE_CODE (type) == ARRAY_TYPE)
{