/* Handle initialization things in C++.
- Copyright (C) 1987-2015 Free Software Foundation, Inc.
+ Copyright (C) 1987-2018 Free Software Foundation, Inc.
Contributed by Michael Tiemann (tiemann@cygnus.com)
This file is part of GCC.
#include "system.h"
#include "coretypes.h"
#include "target.h"
-#include "tree.h"
#include "cp-tree.h"
#include "stringpool.h"
-#include "alias.h"
#include "varasm.h"
-#include "flags.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
+#include "intl.h"
+#include "stringpool.h"
+#include "attribs.h"
+#include "asan.h"
static bool begin_init_stmts (tree *, tree *);
static tree finish_init_stmts (bool, tree, tree);
static tree build_field_list (tree, tree, int *);
static int diagnose_uninitialized_cst_or_ref_member_1 (tree, tree, bool, bool);
+static GTY(()) tree fn;
+
/* We are about to generate some complex initialization code.
Conceptually, it is all a single expression. However, we may want
to include conditionals, loops, and other such statement-level
initialized are initialized to zero. */
;
else if (TYPE_PTR_OR_PTRMEM_P (type))
- init = convert (type, nullptr_node);
+ init = fold (convert (type, nullptr_node));
else if (SCALAR_TYPE_P (type))
- init = convert (type, integer_zero_node);
+ init = fold (convert (type, integer_zero_node));
else if (RECORD_OR_UNION_CODE_P (TREE_CODE (type)))
{
tree field;
else if (VECTOR_TYPE_P (type))
init = build_zero_cst (type);
else
- gcc_assert (TREE_CODE (type) == REFERENCE_TYPE);
+ {
+ gcc_assert (TREE_CODE (type) == REFERENCE_TYPE);
+ init = build_zero_cst (type);
+ }
/* In all cases, the initializer is a constant. */
if (init)
/* Return the non-static data initializer for FIELD_DECL MEMBER. */
+static GTY((cache)) tree_cache_map *nsdmi_inst;
+
tree
-get_nsdmi (tree member, bool in_ctor)
+get_nsdmi (tree member, bool in_ctor, tsubst_flags_t complain)
{
tree init;
tree save_ccp = current_class_ptr;
tree save_ccr = current_class_ref;
- if (!in_ctor)
- {
- /* Use a PLACEHOLDER_EXPR when we don't have a 'this' parameter to
- refer to; constexpr evaluation knows what to do with it. */
- current_class_ref = build0 (PLACEHOLDER_EXPR, DECL_CONTEXT (member));
- current_class_ptr = build_address (current_class_ref);
- }
-
if (DECL_LANG_SPECIFIC (member) && DECL_TEMPLATE_INFO (member))
{
init = DECL_INITIAL (DECL_TI_TEMPLATE (member));
+ location_t expr_loc
+ = EXPR_LOC_OR_LOC (init, DECL_SOURCE_LOCATION (member));
+ tree *slot;
if (TREE_CODE (init) == DEFAULT_ARG)
- goto unparsed;
-
+ /* Unparsed. */;
+ else if (nsdmi_inst && (slot = nsdmi_inst->get (member)))
+ init = *slot;
/* Check recursive instantiation. */
- if (DECL_INSTANTIATING_NSDMI_P (member))
+ else if (DECL_INSTANTIATING_NSDMI_P (member))
{
- error ("recursive instantiation of non-static data member "
- "initializer for %qD", member);
+ if (complain & tf_error)
+ error_at (expr_loc, "recursive instantiation of default member "
+ "initializer for %qD", member);
init = error_mark_node;
}
else
{
+ int un = cp_unevaluated_operand;
+ cp_unevaluated_operand = 0;
+
+ location_t sloc = input_location;
+ input_location = expr_loc;
+
DECL_INSTANTIATING_NSDMI_P (member) = 1;
-
+
+ inject_this_parameter (DECL_CONTEXT (member), TYPE_UNQUALIFIED);
+
+ start_lambda_scope (member);
+
/* Do deferred instantiation of the NSDMI. */
init = (tsubst_copy_and_build
(init, DECL_TI_ARGS (member),
- tf_warning_or_error, member, /*function_p=*/false,
+ complain, member, /*function_p=*/false,
/*integral_constant_expression_p=*/false));
- init = digest_nsdmi_init (member, init);
-
+ init = digest_nsdmi_init (member, init, complain);
+
+ finish_lambda_scope ();
+
DECL_INSTANTIATING_NSDMI_P (member) = 0;
+
+ if (init != error_mark_node)
+ {
+ if (!nsdmi_inst)
+ nsdmi_inst = tree_cache_map::create_ggc (37);
+ nsdmi_inst->put (member, init);
+ }
+
+ input_location = sloc;
+ cp_unevaluated_operand = un;
}
}
else
+ init = DECL_INITIAL (member);
+
+ if (init && TREE_CODE (init) == DEFAULT_ARG)
{
- init = DECL_INITIAL (member);
- if (init && TREE_CODE (init) == DEFAULT_ARG)
+ if (complain & tf_error)
{
- unparsed:
- error ("constructor required before non-static data member "
- "for %qD has been parsed", member);
+ error ("default member initializer for %qD required before the end "
+ "of its enclosing class", member);
+ inform (location_of (init), "defined here");
DECL_INITIAL (member) = error_mark_node;
- init = error_mark_node;
}
- /* Strip redundant TARGET_EXPR so we don't need to remap it, and
- so the aggregate init code below will see a CONSTRUCTOR. */
- if (init && SIMPLE_TARGET_EXPR_P (init))
- init = TARGET_EXPR_INITIAL (init);
- init = break_out_target_exprs (init);
+ init = error_mark_node;
+ }
+
+ if (in_ctor)
+ {
+ current_class_ptr = save_ccp;
+ current_class_ref = save_ccr;
+ }
+ else
+ {
+ /* Use a PLACEHOLDER_EXPR when we don't have a 'this' parameter to
+ refer to; constexpr evaluation knows what to do with it. */
+ current_class_ref = build0 (PLACEHOLDER_EXPR, DECL_CONTEXT (member));
+ current_class_ptr = build_address (current_class_ref);
}
+
+ /* Strip redundant TARGET_EXPR so we don't need to remap it, and
+ so the aggregate init code below will see a CONSTRUCTOR. */
+ bool simple_target = (init && SIMPLE_TARGET_EXPR_P (init));
+ if (simple_target)
+ init = TARGET_EXPR_INITIAL (init);
+ init = break_out_target_exprs (init);
+ if (simple_target && TREE_CODE (init) != CONSTRUCTOR)
+ /* Now put it back so C++17 copy elision works. */
+ init = get_target_expr (init);
+
current_class_ptr = save_ccp;
current_class_ref = save_ccr;
return init;
}
+/* Diagnose the flexible array MEMBER if its INITializer is non-null
+ and return true if so. Otherwise return false. */
+
+bool
+maybe_reject_flexarray_init (tree member, tree init)
+{
+ tree type = TREE_TYPE (member);
+
+ if (!init
+ || TREE_CODE (type) != ARRAY_TYPE
+ || TYPE_DOMAIN (type))
+ return false;
+
+ /* Point at the flexible array member declaration if it's initialized
+ in-class, and at the ctor if it's initialized in a ctor member
+ initializer list. */
+ location_t loc;
+ if (DECL_INITIAL (member) == init
+ || !current_function_decl
+ || DECL_DEFAULTED_FN (current_function_decl))
+ loc = DECL_SOURCE_LOCATION (member);
+ else
+ loc = DECL_SOURCE_LOCATION (current_function_decl);
+
+ error_at (loc, "initializer for flexible array member %q#D", member);
+ return true;
+}
+
/* Initialize MEMBER, a FIELD_DECL, with INIT, a TREE_LIST of
arguments. If TREE_LIST is void_type_node, an empty initializer
list was given; if NULL_TREE no initializer was given. */
/* Use the non-static data member initializer if there was no
mem-initializer for this field. */
if (init == NULL_TREE)
- init = get_nsdmi (member, /*ctor*/true);
+ init = get_nsdmi (member, /*ctor*/true, tf_warning_or_error);
if (init == error_mark_node)
return;
{
if (init)
{
- if (TREE_CHAIN (init))
+	  /* Check whether the member initializer tries to initialize
+	     a flexible array member (as in T a[] = { 1, 2 }); if so,
+	     diagnose it and return early to avoid triggering another
+	     error below.  */
+ if (maybe_reject_flexarray_init (member, init))
+ return;
+
+ if (TREE_CODE (init) != TREE_LIST || TREE_CHAIN (init))
init = error_mark_node;
else
init = TREE_VALUE (init);
+
if (BRACE_ENCLOSED_INITIALIZER_P (init))
init = digest_init (type, init, tf_warning_or_error);
}
|| same_type_ignoring_top_level_qualifiers_p (type,
TREE_TYPE (init)))
{
- init = build_vec_init_expr (type, init, tf_warning_or_error);
- init = build2 (INIT_EXPR, type, decl, init);
- finish_expr_stmt (init);
+ if (TYPE_DOMAIN (type) && TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
+ {
+ /* Initialize the array only if it's not a flexible
+ array member (i.e., if it has an upper bound). */
+ init = build_vec_init_expr (type, init, tf_warning_or_error);
+ init = build2 (INIT_EXPR, type, decl, init);
+ finish_expr_stmt (init);
+ }
}
else
error ("invalid initializer for array member %q#D", member);
init = build_x_compound_expr_from_list (init, ELK_MEM_INIT,
tf_warning_or_error);
- if (init)
- finish_expr_stmt (cp_build_modify_expr (decl, INIT_EXPR, init,
+ /* Reject a member initializer for a flexible array member. */
+ if (init && !maybe_reject_flexarray_init (member, init))
+ finish_expr_stmt (cp_build_modify_expr (input_location, decl,
+ INIT_EXPR, init,
tf_warning_or_error));
}
return sorted_inits;
}
+/* Callback for cp_walk_tree to mark all PARM_DECLs in a tree as read. */
+
+static tree
+mark_exp_read_r (tree *tp, int *, void *)
+{
+ tree t = *tp;
+ if (TREE_CODE (t) == PARM_DECL)
+ mark_exp_read (t);
+ return NULL_TREE;
+}
+
/* Initialize all bases and members of CURRENT_CLASS_TYPE. MEM_INITS
is a TREE_LIST giving the explicit mem-initializer-list for the
constructor. The TREE_PURPOSE of each entry is a subobject (a
}
if (DECL_DEFAULTED_FN (current_function_decl)
- && ! DECL_INHERITED_CTOR_BASE (current_function_decl))
+ && ! DECL_INHERITED_CTOR (current_function_decl))
flags |= LOOKUP_DEFAULTED;
/* Sort the mem-initializers into the order in which the
if (arguments == error_mark_node)
continue;
+ /* Suppress access control when calling the inherited ctor. */
+ bool inherited_base = (DECL_INHERITED_CTOR (current_function_decl)
+ && flag_new_inheriting_ctors
+ && arguments);
+ if (inherited_base)
+ push_deferring_access_checks (dk_deferred);
+
if (arguments == NULL_TREE)
{
/* If these initializations are taking place in a copy constructor,
}
/* Initialize the base. */
- if (BINFO_VIRTUAL_P (subobject))
- construct_virtual_base (subobject, arguments);
- else
+ if (!BINFO_VIRTUAL_P (subobject))
{
tree base_addr;
base_addr = build_base_path (PLUS_EXPR, current_class_ptr,
subobject, 1, tf_warning_or_error);
expand_aggr_init_1 (subobject, NULL_TREE,
- cp_build_indirect_ref (base_addr, RO_NULL,
- tf_warning_or_error),
+ cp_build_fold_indirect_ref (base_addr),
arguments,
flags,
tf_warning_or_error);
expand_cleanup_for_base (subobject, NULL_TREE);
}
+ else if (!ABSTRACT_CLASS_TYPE_P (current_class_type))
+	/* C++14 DR1658 means we do not have to construct vbases of
+ abstract classes. */
+ construct_virtual_base (subobject, arguments);
+ else
+ /* When not constructing vbases of abstract classes, at least mark
+ the arguments expressions as read to avoid
+ -Wunused-but-set-parameter false positives. */
+ cp_walk_tree (&arguments, mark_exp_read_r, NULL, NULL);
+
+ if (inherited_base)
+ pop_deferring_access_checks ();
}
in_base_initializer = 0;
/* Compute the value to use, when there's a VTT. */
vtt_parm = current_vtt_parm;
vtbl2 = fold_build_pointer_plus (vtt_parm, vtt_index);
- vtbl2 = cp_build_indirect_ref (vtbl2, RO_NULL, tf_warning_or_error);
+ vtbl2 = cp_build_fold_indirect_ref (vtbl2);
vtbl2 = convert (TREE_TYPE (vtbl), vtbl2);
/* The actual initializer is the VTT value only in the subobject
constructor. In maybe_clone_body we'll substitute NULL for
the vtt_parm in the case of the non-subobject constructor. */
- vtbl = build3 (COND_EXPR,
- TREE_TYPE (vtbl),
- build2 (EQ_EXPR, boolean_type_node,
- current_in_charge_parm, integer_zero_node),
- vtbl2,
- vtbl);
+ vtbl = build_if_in_charge (vtbl, vtbl2);
}
/* Compute the location of the vtpr. */
- vtbl_ptr = build_vfield_ref (cp_build_indirect_ref (decl, RO_NULL,
- tf_warning_or_error),
+ vtbl_ptr = build_vfield_ref (cp_build_fold_indirect_ref (decl),
TREE_TYPE (binfo));
gcc_assert (vtbl_ptr != error_mark_node);
/* Assign the vtable to the vptr. */
vtbl = convert_force (TREE_TYPE (vtbl_ptr), vtbl, 0, tf_warning_or_error);
- finish_expr_stmt (cp_build_modify_expr (vtbl_ptr, NOP_EXPR, vtbl,
- tf_warning_or_error));
+ finish_expr_stmt (cp_build_modify_expr (input_location, vtbl_ptr, NOP_EXPR,
+ vtbl, tf_warning_or_error));
}
/* If an exception is thrown in a constructor, those base classes already
if (init == error_mark_node)
return error_mark_node;
+ location_t init_loc = (init
+ ? EXPR_LOC_OR_LOC (init, input_location)
+ : location_of (exp));
+
TREE_READONLY (exp) = 0;
TREE_THIS_VOLATILE (exp) = 0;
- if (init && init != void_type_node
- && TREE_CODE (init) != TREE_LIST
- && !(TREE_CODE (init) == TARGET_EXPR
- && TARGET_EXPR_DIRECT_INIT_P (init))
- && !DIRECT_LIST_INIT_P (init))
- flags |= LOOKUP_ONLYCONVERTING;
-
if (TREE_CODE (type) == ARRAY_TYPE)
{
- tree itype;
+ tree itype = init ? TREE_TYPE (init) : NULL_TREE;
+ int from_array = 0;
- /* An array may not be initialized use the parenthesized
- initialization form -- unless the initializer is "()". */
- if (init && TREE_CODE (init) == TREE_LIST)
+ if (VAR_P (exp) && DECL_DECOMPOSITION_P (exp))
{
- if (complain & tf_error)
- error ("bad array initializer");
- return error_mark_node;
+ from_array = 1;
+ if (init && DECL_P (init)
+ && !(flags & LOOKUP_ONLYCONVERTING))
+ {
+ /* Wrap the initializer in a CONSTRUCTOR so that build_vec_init
+ recognizes it as direct-initialization. */
+ init = build_constructor_single (init_list_type_node,
+ NULL_TREE, init);
+ CONSTRUCTOR_IS_DIRECT_INIT (init) = true;
+ }
+ }
+ else
+ {
+      /* An array may not be initialized using the parenthesized
+ initialization form -- unless the initializer is "()". */
+ if (init && TREE_CODE (init) == TREE_LIST)
+ {
+ if (complain & tf_error)
+ error ("bad array initializer");
+ return error_mark_node;
+ }
+ /* Must arrange to initialize each element of EXP
+ from elements of INIT. */
+ if (cv_qualified_p (type))
+ TREE_TYPE (exp) = cv_unqualified (type);
+ if (itype && cv_qualified_p (itype))
+ TREE_TYPE (init) = cv_unqualified (itype);
+ from_array = (itype && same_type_p (TREE_TYPE (init),
+ TREE_TYPE (exp)));
+
+ if (init && !from_array
+ && !BRACE_ENCLOSED_INITIALIZER_P (init))
+ {
+ if (complain & tf_error)
+ permerror (init_loc, "array must be initialized "
+ "with a brace-enclosed initializer");
+ else
+ return error_mark_node;
+ }
}
- /* Must arrange to initialize each element of EXP
- from elements of INIT. */
- itype = init ? TREE_TYPE (init) : NULL_TREE;
- if (cv_qualified_p (type))
- TREE_TYPE (exp) = cv_unqualified (type);
- if (itype && cv_qualified_p (itype))
- TREE_TYPE (init) = cv_unqualified (itype);
+
stmt_expr = build_vec_init (exp, NULL_TREE, init,
/*explicit_value_init_p=*/false,
- itype && same_type_p (TREE_TYPE (init),
- TREE_TYPE (exp)),
+ from_array,
complain);
TREE_READONLY (exp) = was_const;
TREE_THIS_VOLATILE (exp) = was_volatile;
return stmt_expr;
}
+ if (init && init != void_type_node
+ && TREE_CODE (init) != TREE_LIST
+ && !(TREE_CODE (init) == TARGET_EXPR
+ && TARGET_EXPR_DIRECT_INIT_P (init))
+ && !DIRECT_LIST_INIT_P (init))
+ flags |= LOOKUP_ONLYCONVERTING;
+
if ((VAR_P (exp) || TREE_CODE (exp) == PARM_DECL)
&& !lookup_attribute ("warn_unused", TYPE_ATTRIBUTES (type)))
/* Just know that we've seen something for this node. */
tsubst_flags_t complain)
{
tree type = TREE_TYPE (exp);
- tree ctor_name;
/* It fails because there may not be a constructor which takes
its own type as the first (or only parameter), but which does
gcc_checking_assert ((flags & LOOKUP_ONLYCONVERTING) == 0
&& TREE_CHAIN (init) == NULL_TREE);
init = TREE_VALUE (init);
+ /* Only call reshape_init if it has not been called earlier
+ by the callers. */
+ if (BRACE_ENCLOSED_INITIALIZER_P (init) && CP_AGGREGATE_TYPE_P (type))
+ init = reshape_init (type, init, complain);
}
if (init && BRACE_ENCLOSED_INITIALIZER_P (init)
&& CP_AGGREGATE_TYPE_P (type))
/* A brace-enclosed initializer for an aggregate. In C++0x this can
happen for direct-initialization, too. */
- {
- init = reshape_init (type, init, complain);
- init = digest_init (type, init, complain);
- }
+ init = digest_init (type, init, complain);
/* A CONSTRUCTOR of the target's type is a previously digested
initializer, whether that happened just above or in
&parms, binfo, flags,
complain);
base = fold_build_cleanup_point_expr (void_type_node, base);
- rval = build3 (COND_EXPR, void_type_node,
- build2 (EQ_EXPR, boolean_type_node,
- current_in_charge_parm, integer_zero_node),
- base,
- complete);
+ rval = build_if_in_charge (complete, base);
}
else
{
- if (true_exp == exp)
- ctor_name = complete_ctor_identifier;
- else
- ctor_name = base_ctor_identifier;
+ tree ctor_name = (true_exp == exp
+ ? complete_ctor_identifier : base_ctor_identifier);
+
rval = build_special_member_call (exp, ctor_name, &parms, binfo, flags,
complain);
- }
+ }
if (parms != NULL)
release_tree_vector (parms);
return;
}
+ /* List-initialization from {} becomes value-initialization for non-aggregate
+ classes with default constructors. Handle this here when we're
+ initializing a base, so protected access works. */
+ if (exp != true_exp && init && TREE_CODE (init) == TREE_LIST)
+ {
+ tree elt = TREE_VALUE (init);
+ if (DIRECT_LIST_INIT_P (elt)
+ && CONSTRUCTOR_ELTS (elt) == 0
+ && CLASSTYPE_NON_AGGREGATE (type)
+ && TYPE_HAS_DEFAULT_CONSTRUCTOR (type))
+ init = void_type_node;
+ }
+
/* If an explicit -- but empty -- initializer list was present,
that's value-initialization. */
if (init == void_type_node)
if (TREE_CODE (t) != TEMPLATE_ID_EXPR && !really_overloaded_fn (t))
{
/* Get rid of a potential OVERLOAD around it. */
- t = OVL_CURRENT (t);
+ t = OVL_FIRST (t);
/* Unique functions are handled easily. */
If the access is to form a pointer to member, the
nested-name-specifier shall name the derived class
(or any class derived from that class). */
+ bool ok;
if (address_p && DECL_P (t)
&& DECL_NONSTATIC_MEMBER_P (t))
- perform_or_defer_access_check (TYPE_BINFO (type), t, t,
- complain);
+ ok = perform_or_defer_access_check (TYPE_BINFO (type), t, t,
+ complain);
else
- perform_or_defer_access_check (basebinfo, t, t,
- complain);
-
+ ok = perform_or_defer_access_check (basebinfo, t, t,
+ complain);
+ if (!ok)
+ return error_mark_node;
if (DECL_STATIC_FUNCTION_P (t))
return t;
member = t;
TREE_TYPE (member) = unknown_type_node;
}
else if (address_p && TREE_CODE (member) == FIELD_DECL)
- /* We need additional test besides the one in
- check_accessibility_of_qualified_id in case it is
- a pointer to non-static member. */
- perform_or_defer_access_check (TYPE_BINFO (type), member, member,
- complain);
+ {
+ /* We need additional test besides the one in
+ check_accessibility_of_qualified_id in case it is
+ a pointer to non-static member. */
+ if (!perform_or_defer_access_check (TYPE_BINFO (type), member, member,
+ complain))
+ return error_mark_node;
+ }
if (!address_p)
{
constant_value_1 (tree decl, bool strict_p, bool return_aggregate_cst_ok_p)
{
while (TREE_CODE (decl) == CONST_DECL
- || (strict_p
- ? decl_constant_var_p (decl)
- : (VAR_P (decl)
- && CP_TYPE_CONST_NON_VOLATILE_P (TREE_TYPE (decl)))))
+ || decl_constant_var_p (decl)
+ || (!strict_p && VAR_P (decl)
+ && CP_TYPE_CONST_NON_VOLATILE_P (TREE_TYPE (decl))))
{
tree init;
/* If DECL is a static data member in a template
initializer for the static data member is not processed
until needed; we need it now. */
mark_used (decl, tf_none);
- mark_rvalue_use (decl);
init = DECL_INITIAL (decl);
if (init == error_mark_node)
{
- if (DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (decl))
+ if (TREE_CODE (decl) == CONST_DECL
+ || DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (decl))
/* Treat the error as a constant to avoid cascading errors on
excessively recursive template instantiation (c++/9335). */
return init;
&& TREE_CODE (init) == TREE_LIST
&& TREE_CHAIN (init) == NULL_TREE)
init = TREE_VALUE (init);
+ /* Instantiate a non-dependent initializer for user variables. We
+ mustn't do this for the temporary for an array compound literal;
+     trying to instantiate the initializer will keep creating new
+ temporaries until we crash. Probably it's not useful to do it for
+ other artificial variables, either. */
+ if (!DECL_ARTIFICIAL (decl))
+ init = instantiate_non_dependent_or_null (init);
if (!init
|| !TREE_TYPE (init)
|| !TREE_CONSTANT (init)
&& (TREE_CODE (init) == CONSTRUCTOR
|| TREE_CODE (init) == STRING_CST)))
break;
+ /* Don't return a CONSTRUCTOR for a variable with partial run-time
+ initialization, since it doesn't represent the entire value. */
+ if (TREE_CODE (init) == CONSTRUCTOR
+ && !DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (decl))
+ break;
+ /* If the variable has a dynamic initializer, don't use its
+ DECL_INITIAL which doesn't reflect the real value. */
+ if (VAR_P (decl)
+ && TREE_STATIC (decl)
+ && !DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (decl)
+ && DECL_NONTRIVIALLY_INITIALIZED_P (decl))
+ break;
decl = unshare_expr (init);
}
return decl;
else if (init->is_empty ())
init_list = void_node;
else
- init_list = build_tree_list_vec (init);
+ {
+ init_list = build_tree_list_vec (init);
+ for (tree v = init_list; v; v = TREE_CHAIN (v))
+ if (TREE_CODE (TREE_VALUE (v)) == OVERLOAD)
+ lookup_keep (TREE_VALUE (v), true);
+ }
new_expr = build4 (NEW_EXPR, build_pointer_type (type),
build_tree_list_vec (placement), type, nelts,
tree
throw_bad_array_new_length (void)
{
- tree fn = get_identifier ("__cxa_throw_bad_array_new_length");
- if (!get_global_value_if_present (fn, &fn))
- fn = push_throw_library_fn (fn, build_function_type_list (sizetype,
- NULL_TREE));
+ if (!fn)
+ {
+ tree name = get_identifier ("__cxa_throw_bad_array_new_length");
+
+ fn = get_global_binding (name);
+ if (!fn)
+ fn = push_throw_library_fn
+ (name, build_function_type_list (sizetype, NULL_TREE));
+ }
return build_cxx_call (fn, 0, NULL, tf_warning_or_error);
}
+/* Attempt to find the initializer for flexible array field T in the
+ initializer INIT, when non-null. Returns the initializer when
+ successful and NULL otherwise. */
+static tree
+find_flexarray_init (tree t, tree init)
+{
+ if (!init || init == error_mark_node)
+ return NULL_TREE;
+
+ unsigned HOST_WIDE_INT idx;
+ tree field, elt;
+
+ /* Iterate over all top-level initializer elements. */
+ FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, field, elt)
+ /* If the member T is found, return it. */
+ if (field == t)
+ return elt;
+
+ return NULL_TREE;
+}
+
/* Attempt to verify that the argument, OPER, of a placement new expression
refers to an object sufficiently large for an object of TYPE or an array
of NELTS of such objects when NELTS is non-null, and issue a warning when
/* The number of bytes to add to or subtract from the size of the provided
buffer based on an offset into an array or an array element reference.
- Although intermediate results may be negative (as in a[3] - 2) the final
- result cannot be. */
- HOST_WIDE_INT adjust = 0;
+ Although intermediate results may be negative (as in a[3] - 2) a valid
+ final result cannot be. */
+ offset_int adjust = 0;
/* True when the size of the entire destination object should be used
to compute the possibly optimistic estimate of the available space. */
bool use_obj_size = false;
to placement new is not checked since it's unknown what it might
point to. */
if (TREE_CODE (oper) == PARM_DECL
- || TREE_CODE (oper) == VAR_DECL
+ || VAR_P (oper)
|| TREE_CODE (oper) == COMPONENT_REF)
return;
is a constant. */
if (TREE_CODE (oper) == POINTER_PLUS_EXPR)
{
- /* If the offset is comple-time constant, use it to compute a more
+ /* If the offset is compile-time constant, use it to compute a more
accurate estimate of the size of the buffer. Since the operand
of POINTER_PLUS_EXPR is represented as an unsigned type, convert
it to signed first.
estimate (this may lead to false negatives). */
tree adj = TREE_OPERAND (oper, 1);
if (CONSTANT_CLASS_P (adj))
- adjust += tree_to_shwi (convert (ssizetype, adj));
+ adjust += wi::to_offset (convert (ssizetype, adj));
else
use_obj_size = true;
STRIP_NOPS (oper);
- if (TREE_CODE (oper) == ARRAY_REF)
+ if (TREE_CODE (oper) == ARRAY_REF
+ && (addr_expr || TREE_CODE (TREE_TYPE (oper)) == ARRAY_TYPE))
{
/* Similar to the offset computed above, see if the array index
is a compile-time constant. If so, and unless the offset was
not a compile-time constant, use the index to determine the
size of the buffer. Otherwise, use the entire array as
an optimistic estimate of the size. */
- const_tree adj = TREE_OPERAND (oper, 1);
+ const_tree adj = fold_non_dependent_expr (TREE_OPERAND (oper, 1));
if (!use_obj_size && CONSTANT_CLASS_P (adj))
- adjust += tree_to_shwi (adj);
+ adjust += wi::to_offset (adj);
else
{
use_obj_size = true;
oper = TREE_OPERAND (oper, 0);
}
+  /* Refers to the declared object that contains the subobject referenced
+ by OPER. When the object is initialized, makes it possible to determine
+ the actual size of a flexible array member used as the buffer passed
+ as OPER to placement new. */
+ tree var_decl = NULL_TREE;
+ /* True when operand is a COMPONENT_REF, to distinguish flexible array
+ members from arrays of unspecified size. */
+ bool compref = TREE_CODE (oper) == COMPONENT_REF;
+
+ /* For COMPONENT_REF (i.e., a struct member) the size of the entire
+ enclosing struct. Used to validate the adjustment (offset) into
+ an array at the end of a struct. */
+ offset_int compsize = 0;
+
/* Descend into a struct or union to find the member whose address
- is being used as the agument. */
- while (TREE_CODE (oper) == COMPONENT_REF)
- oper = TREE_OPERAND (oper, 1);
+ is being used as the argument. */
+ if (TREE_CODE (oper) == COMPONENT_REF)
+ {
+ tree comptype = TREE_TYPE (TREE_OPERAND (oper, 0));
+ compsize = wi::to_offset (TYPE_SIZE_UNIT (comptype));
+
+ tree op0 = oper;
+ while (TREE_CODE (op0 = TREE_OPERAND (op0, 0)) == COMPONENT_REF);
+ if (VAR_P (op0))
+ var_decl = op0;
+ oper = TREE_OPERAND (oper, 1);
+ }
- if ((addr_expr || !POINTER_TYPE_P (TREE_TYPE (oper)))
- && (TREE_CODE (oper) == VAR_DECL
+ tree opertype = TREE_TYPE (oper);
+ if ((addr_expr || !POINTER_TYPE_P (opertype))
+ && (VAR_P (oper)
|| TREE_CODE (oper) == FIELD_DECL
|| TREE_CODE (oper) == PARM_DECL))
{
/* A possibly optimistic estimate of the number of bytes available
in the destination buffer. */
- unsigned HOST_WIDE_INT bytes_avail;
+ offset_int bytes_avail = 0;
/* True when the estimate above is in fact the exact size
of the destination buffer rather than an estimate. */
bool exact_size = true;
/* Treat members of unions and members of structs uniformly, even
though the size of a member of a union may be viewed as extending
to the end of the union itself (it is by __builtin_object_size). */
- if ((TREE_CODE (oper) == VAR_DECL || use_obj_size)
- && DECL_SIZE_UNIT (oper))
+ if ((VAR_P (oper) || use_obj_size)
+ && DECL_SIZE_UNIT (oper)
+ && tree_fits_uhwi_p (DECL_SIZE_UNIT (oper)))
{
/* Use the size of the entire array object when the expression
refers to a variable or its size depends on an expression
that's not a compile-time constant. */
- bytes_avail = tree_to_uhwi (DECL_SIZE_UNIT (oper));
+ bytes_avail = wi::to_offset (DECL_SIZE_UNIT (oper));
exact_size = !use_obj_size;
}
- else if (TYPE_SIZE_UNIT (TREE_TYPE (oper)))
+ else if (tree opersize = TYPE_SIZE_UNIT (opertype))
{
/* Use the size of the type of the destination buffer object
- as the optimistic estimate of the available space in it. */
- bytes_avail = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (oper)));
+ as the optimistic estimate of the available space in it.
+ Use the maximum possible size for zero-size arrays and
+ flexible array members (except of initialized objects
+ thereof). */
+ if (TREE_CODE (opersize) == INTEGER_CST)
+ bytes_avail = wi::to_offset (opersize);
}
- else
+
+ if (bytes_avail == 0)
{
- /* Bail if neither the size of the object nor its type is known. */
- return;
+ if (var_decl)
+ {
+ /* Constructing into a buffer provided by the flexible array
+ member of a declared object (which is permitted as a G++
+ extension). If the array member has been initialized,
+ determine its size from the initializer. Otherwise,
+ the array size is zero. */
+ if (tree init = find_flexarray_init (oper,
+ DECL_INITIAL (var_decl)))
+ bytes_avail = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (init)));
+ }
+ else
+ bytes_avail = (wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node))
+ - compsize);
}
- /* Avoid diagnosing flexible array members (accepted as an extension
- and diagnosed with -Wpedantic).
- Constructing objects that appear to overflow the C99 equivalent of
- flexible array members (i.e., array members of size zero or one)
- are diagnosed in C++ since their declaration cannot be diagnosed. */
- if (bytes_avail == 0 && TREE_CODE (TREE_TYPE (oper)) == ARRAY_TYPE)
- return;
+ tree_code oper_code = TREE_CODE (opertype);
- /* The size of the buffer can only be adjusted down but not up. */
- gcc_checking_assert (0 <= adjust);
+ if (compref && oper_code == ARRAY_TYPE)
+ {
+ tree nelts = array_type_nelts_top (opertype);
+ tree nelts_cst = maybe_constant_value (nelts);
+ if (TREE_CODE (nelts_cst) == INTEGER_CST
+ && integer_onep (nelts_cst)
+ && !var_decl
+ && warn_placement_new < 2)
+ return;
+ }
/* Reduce the size of the buffer by the adjustment computed above
from the offset and/or the index into the array. */
- if (bytes_avail < static_cast<unsigned HOST_WIDE_INT>(adjust))
+ if (bytes_avail < adjust || adjust < 0)
bytes_avail = 0;
else
- bytes_avail -= adjust;
+ {
+ tree elttype = (TREE_CODE (opertype) == ARRAY_TYPE
+ ? TREE_TYPE (opertype) : opertype);
+ if (tree eltsize = TYPE_SIZE_UNIT (elttype))
+ {
+ bytes_avail -= adjust * wi::to_offset (eltsize);
+ if (bytes_avail < 0)
+ bytes_avail = 0;
+ }
+ }
/* The minimum amount of space needed for the allocation. This
is an optimistic estimate that makes it possible to detect
placement new invocation for some undersize buffers but not
others. */
- unsigned HOST_WIDE_INT bytes_need;
+ offset_int bytes_need;
if (CONSTANT_CLASS_P (size))
- bytes_need = tree_to_uhwi (size);
+ bytes_need = wi::to_offset (size);
else if (nelts && CONSTANT_CLASS_P (nelts))
- bytes_need = tree_to_uhwi (nelts)
- * tree_to_uhwi (TYPE_SIZE_UNIT (type));
+ bytes_need = (wi::to_offset (nelts)
+ * wi::to_offset (TYPE_SIZE_UNIT (type)));
+ else if (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
+ bytes_need = wi::to_offset (TYPE_SIZE_UNIT (type));
else
- bytes_need = tree_to_uhwi (TYPE_SIZE_UNIT (type));
+ {
+ /* The type is a VLA. */
+ return;
+ }
if (bytes_avail < bytes_need)
{
if (nelts)
if (CONSTANT_CLASS_P (nelts))
- warning_at (loc, OPT_Wplacement_new,
+ warning_at (loc, OPT_Wplacement_new_,
exact_size ?
"placement new constructing an object of type "
"%<%T [%wu]%> and size %qwu in a region of type %qT "
: "placement new constructing an object of type "
"%<%T [%wu]%> and size %qwu in a region of type %qT "
"and size at most %qwu",
- type, tree_to_uhwi (nelts), bytes_need,
- TREE_TYPE (oper),
- bytes_avail);
+ type, tree_to_uhwi (nelts), bytes_need.to_uhwi (),
+ opertype, bytes_avail.to_uhwi ());
else
- warning_at (loc, OPT_Wplacement_new,
+ warning_at (loc, OPT_Wplacement_new_,
exact_size ?
"placement new constructing an array of objects "
"of type %qT and size %qwu in a region of type %qT "
: "placement new constructing an array of objects "
"of type %qT and size %qwu in a region of type %qT "
"and size at most %qwu",
- type, bytes_need, TREE_TYPE (oper),
- bytes_avail);
+ type, bytes_need.to_uhwi (), opertype,
+ bytes_avail.to_uhwi ());
else
- warning_at (loc, OPT_Wplacement_new,
+ warning_at (loc, OPT_Wplacement_new_,
exact_size ?
"placement new constructing an object of type %qT "
"and size %qwu in a region of type %qT and size %qwi"
- : "placement new constructing an object of type %qT"
+ : "placement new constructing an object of type %qT "
"and size %qwu in a region of type %qT and size "
"at most %qwu",
- type, bytes_need, TREE_TYPE (oper),
- bytes_avail);
+ type, bytes_need.to_uhwi (), opertype,
+ bytes_avail.to_uhwi ());
}
}
}
+/* True if alignof(T) > __STDCPP_DEFAULT_NEW_ALIGNMENT__. */
+
+bool
+type_has_new_extended_alignment (tree t)
+{
+ return (aligned_new_threshold
+ && TYPE_ALIGN_UNIT (t) > (unsigned)aligned_new_threshold);
+}
+
+/* Return the alignment we expect malloc to guarantee. This should just be
+ MALLOC_ABI_ALIGNMENT, but that macro defaults to only BITS_PER_WORD for some
+ reason, so don't let the threshold be smaller than max_align_t_align. */
+
+unsigned
+malloc_alignment ()
+{
+ return MAX (max_align_t_align(), MALLOC_ABI_ALIGNMENT);
+}
+
+/* Determine whether an allocation function is a namespace-scope
+ non-replaceable placement new function. See DR 1748.
+ TODO: Enable in all standard modes. */
+static bool
+std_placement_new_fn_p (tree alloc_fn)
+{
+ if (DECL_NAMESPACE_SCOPE_P (alloc_fn))
+ {
+ tree first_arg = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (alloc_fn)));
+ if ((TREE_VALUE (first_arg) == ptr_type_node)
+ && TREE_CHAIN (first_arg) == void_list_node)
+ return true;
+ }
+ return false;
+}
+
/* Generate code for a new-expression, including calling the "operator
new" function, initializing the object, and, if an exception occurs
during construction, cleaning up. The arguments are as for
- build_raw_new_expr. This may change PLACEMENT and INIT. */
+ build_raw_new_expr. This may change PLACEMENT and INIT.
+ TYPE is the type of the object being constructed, possibly an array
+ of NELTS elements when NELTS is non-null (in "new T[NELTS]", T may
+ be an array of the form U[inner], with the whole expression being
+ "new U[NELTS][inner]"). */
static tree
build_new_1 (vec<tree, va_gc> **placement, tree type, tree nelts,
type.) */
tree pointer_type;
tree non_const_pointer_type;
+ /* The most significant array bound in int[OUTER_NELTS][inner]. */
tree outer_nelts = NULL_TREE;
- /* For arrays, a bounds checks on the NELTS parameter. */
+ /* For arrays with a non-constant number of elements, a bounds check
+ on the NELTS parameter to avoid integer overflow at runtime. */
tree outer_nelts_check = NULL_TREE;
bool outer_nelts_from_type = false;
+ /* Number of the "inner" elements in "new T[OUTER_NELTS][inner]". */
offset_int inner_nelts_count = 1;
tree alloc_call, alloc_expr;
- /* Size of the inner array elements. */
+ /* Size of the inner array elements (those with constant dimensions). */
offset_int inner_size;
/* The address returned by the call to "operator new". This node is
a VAR_DECL and is therefore reusable. */
tree alloc_fn;
tree cookie_expr, init_expr;
int nothrow, check_new;
- int use_java_new = 0;
/* If non-NULL, the number of extra bytes to allocate at the
beginning of the storage allocated for an array-new expression in
order to store the number of elements. */
outer_nelts_from_type = true;
}
+ /* Lots of logic below depends on whether we have a constant number of
+ elements, so go ahead and fold it now. */
+ if (outer_nelts)
+ outer_nelts = maybe_constant_value (outer_nelts);
+
/* If our base type is an array, then make sure we know how many elements
it has. */
for (elt_type = type;
/* Warn if we performed the (T[N]) to T[N] transformation and N is
variable. */
if (outer_nelts_from_type
- && !TREE_CONSTANT (maybe_constant_value (outer_nelts)))
+ && !TREE_CONSTANT (outer_nelts))
{
if (complain & tf_warning_or_error)
{
- const char *msg;
- if (typedef_variant_p (orig_type))
- msg = ("non-constant array new length must be specified "
- "directly, not by typedef");
- else
- msg = ("non-constant array new length must be specified "
- "without parentheses around the type-id");
- pedwarn (EXPR_LOC_OR_LOC (outer_nelts, input_location),
- OPT_Wvla, msg);
+ pedwarn (EXPR_LOC_OR_LOC (outer_nelts, input_location), OPT_Wvla,
+ typedef_variant_p (orig_type)
+ ? G_("non-constant array new length must be specified "
+ "directly, not by typedef")
+ : G_("non-constant array new length must be specified "
+ "without parentheses around the type-id"));
}
else
return error_mark_node;
}
max_outer_nelts = wi::udiv_trunc (max_size, inner_size);
- /* Only keep the top-most seven bits, to simplify encoding the
- constant in the instruction stream. */
- {
- unsigned shift = (max_outer_nelts.get_precision ()) - 7
- - wi::clz (max_outer_nelts);
- max_outer_nelts = wi::lshift (wi::lrshift (max_outer_nelts, shift),
- shift);
- }
max_outer_nelts_tree = wide_int_to_tree (sizetype, max_outer_nelts);
- size = size_binop (MULT_EXPR, size, convert (sizetype, nelts));
- outer_nelts_check = fold_build2 (LE_EXPR, boolean_type_node,
- outer_nelts,
- max_outer_nelts_tree);
+ size = size_binop (MULT_EXPR, size, fold_convert (sizetype, nelts));
+
+ if (INTEGER_CST == TREE_CODE (outer_nelts))
+ {
+ if (tree_int_cst_lt (max_outer_nelts_tree, outer_nelts))
+ {
+ /* When the array size is constant, check it at compile time
+ to make sure it doesn't exceed the implementation-defined
+ maximum, as required by C++ 14 (in C++ 11 this requirement
+ isn't explicitly stated but it's enforced anyway -- see
+ grokdeclarator in cp/decl.c). */
+ if (complain & tf_error)
+ error ("size of array is too large");
+ return error_mark_node;
+ }
+ }
+ else
+ {
+ /* When a runtime check is necessary because the array size
+ isn't constant, keep only the top-most seven bits (starting
+ with the most significant non-zero bit) of the maximum size
+ to compare the array size against, to simplify encoding the
+ constant maximum size in the instruction stream. */
+
+ unsigned shift = (max_outer_nelts.get_precision ()) - 7
+ - wi::clz (max_outer_nelts);
+ max_outer_nelts = (max_outer_nelts >> shift) << shift;
+
+ outer_nelts_check = fold_build2 (LE_EXPR, boolean_type_node,
+ outer_nelts,
+ max_outer_nelts_tree);
+ }
}
+ tree align_arg = NULL_TREE;
+ if (type_has_new_extended_alignment (elt_type))
+ align_arg = build_int_cst (align_type_node, TYPE_ALIGN_UNIT (elt_type));
+
alloc_fn = NULL_TREE;
/* If PLACEMENT is a single simple pointer type not passed by
bool member_new_p = false;
/* Allocate the object. */
- if (vec_safe_is_empty (*placement) && TYPE_FOR_JAVA (elt_type))
- {
- tree class_addr;
- tree class_decl;
- static const char alloc_name[] = "_Jv_AllocObject";
+ tree fnname;
+ tree fns;
- if (!MAYBE_CLASS_TYPE_P (elt_type))
- {
- error ("%qT isn%'t a valid Java class type", elt_type);
- return error_mark_node;
- }
+ fnname = ovl_op_identifier (false, array_p ? VEC_NEW_EXPR : NEW_EXPR);
- class_decl = build_java_class_ref (elt_type);
- if (class_decl == error_mark_node)
- return error_mark_node;
+ member_new_p = !globally_qualified_p
+ && CLASS_TYPE_P (elt_type)
+ && (array_p
+ ? TYPE_HAS_ARRAY_NEW_OPERATOR (elt_type)
+ : TYPE_HAS_NEW_OPERATOR (elt_type));
- use_java_new = 1;
- if (!get_global_value_if_present (get_identifier (alloc_name),
- &alloc_fn))
+ if (member_new_p)
+ {
+ /* Use a class-specific operator new. */
+ /* If a cookie is required, add some extra space. */
+ if (array_p && TYPE_VEC_NEW_USES_COOKIE (elt_type))
+ size = size_binop (PLUS_EXPR, size, cookie_size);
+ else
{
- if (complain & tf_error)
- error ("call to Java constructor with %qs undefined", alloc_name);
+ cookie_size = NULL_TREE;
+ /* No size arithmetic necessary, so the size check is
+ not needed. */
+ if (outer_nelts_check != NULL && inner_size == 1)
+ outer_nelts_check = NULL_TREE;
+ }
+ /* Perform the overflow check. */
+ tree errval = TYPE_MAX_VALUE (sizetype);
+ if (cxx_dialect >= cxx11 && flag_exceptions)
+ errval = throw_bad_array_new_length ();
+ if (outer_nelts_check != NULL_TREE)
+ size = fold_build3 (COND_EXPR, sizetype, outer_nelts_check,
+ size, errval);
+ /* Create the argument list. */
+ vec_safe_insert (*placement, 0, size);
+ /* Do name-lookup to find the appropriate operator. */
+ fns = lookup_fnfields (elt_type, fnname, /*protect=*/2);
+ if (fns == NULL_TREE)
+ {
+ if (complain & tf_error)
+ error ("no suitable %qD found in class %qT", fnname, elt_type);
return error_mark_node;
}
- else if (really_overloaded_fn (alloc_fn))
+ if (TREE_CODE (fns) == TREE_LIST)
{
- if (complain & tf_error)
- error ("%qD should never be overloaded", alloc_fn);
+ if (complain & tf_error)
+ {
+ error ("request for member %qD is ambiguous", fnname);
+ print_candidates (fns);
+ }
return error_mark_node;
}
- alloc_fn = OVL_CURRENT (alloc_fn);
- class_addr = build1 (ADDR_EXPR, jclass_node, class_decl);
- alloc_call = cp_build_function_call_nary (alloc_fn, complain,
- class_addr, NULL_TREE);
- }
- else if (TYPE_FOR_JAVA (elt_type) && MAYBE_CLASS_TYPE_P (elt_type))
- {
- error ("Java class %q#T object allocated using placement new", elt_type);
- return error_mark_node;
+ tree dummy = build_dummy_object (elt_type);
+ alloc_call = NULL_TREE;
+ if (align_arg)
+ {
+ vec<tree, va_gc> *align_args
+ = vec_copy_and_insert (*placement, align_arg, 1);
+ alloc_call
+ = build_new_method_call (dummy, fns, &align_args,
+ /*conversion_path=*/NULL_TREE,
+ LOOKUP_NORMAL, &alloc_fn, tf_none);
+ /* If no matching function is found and the allocated object type
+ has new-extended alignment, the alignment argument is removed
+ from the argument list, and overload resolution is performed
+ again. */
+ if (alloc_call == error_mark_node)
+ alloc_call = NULL_TREE;
+ }
+ if (!alloc_call)
+ alloc_call = build_new_method_call (dummy, fns, placement,
+ /*conversion_path=*/NULL_TREE,
+ LOOKUP_NORMAL,
+ &alloc_fn, complain);
}
else
{
- tree fnname;
- tree fns;
-
- fnname = ansi_opname (array_p ? VEC_NEW_EXPR : NEW_EXPR);
-
- member_new_p = !globally_qualified_p
- && CLASS_TYPE_P (elt_type)
- && (array_p
- ? TYPE_HAS_ARRAY_NEW_OPERATOR (elt_type)
- : TYPE_HAS_NEW_OPERATOR (elt_type));
-
- if (member_new_p)
+ /* Use a global operator new. */
+ /* See if a cookie might be required. */
+ if (!(array_p && TYPE_VEC_NEW_USES_COOKIE (elt_type)))
{
- /* Use a class-specific operator new. */
- /* If a cookie is required, add some extra space. */
- if (array_p && TYPE_VEC_NEW_USES_COOKIE (elt_type))
- size = size_binop (PLUS_EXPR, size, cookie_size);
- else
- {
- cookie_size = NULL_TREE;
- /* No size arithmetic necessary, so the size check is
- not needed. */
- if (outer_nelts_check != NULL && inner_size == 1)
- outer_nelts_check = NULL_TREE;
- }
- /* Perform the overflow check. */
- tree errval = TYPE_MAX_VALUE (sizetype);
- if (cxx_dialect >= cxx11 && flag_exceptions)
- errval = throw_bad_array_new_length ();
- if (outer_nelts_check != NULL_TREE)
- size = fold_build3 (COND_EXPR, sizetype, outer_nelts_check,
- size, errval);
- /* Create the argument list. */
- vec_safe_insert (*placement, 0, size);
- /* Do name-lookup to find the appropriate operator. */
- fns = lookup_fnfields (elt_type, fnname, /*protect=*/2);
- if (fns == NULL_TREE)
- {
- if (complain & tf_error)
- error ("no suitable %qD found in class %qT", fnname, elt_type);
- return error_mark_node;
- }
- if (TREE_CODE (fns) == TREE_LIST)
- {
- if (complain & tf_error)
- {
- error ("request for member %qD is ambiguous", fnname);
- print_candidates (fns);
- }
- return error_mark_node;
- }
- alloc_call = build_new_method_call (build_dummy_object (elt_type),
- fns, placement,
- /*conversion_path=*/NULL_TREE,
- LOOKUP_NORMAL,
- &alloc_fn,
- complain);
+ cookie_size = NULL_TREE;
+ /* No size arithmetic necessary, so the size check is
+ not needed. */
+ if (outer_nelts_check != NULL && inner_size == 1)
+ outer_nelts_check = NULL_TREE;
}
- else
- {
- /* Use a global operator new. */
- /* See if a cookie might be required. */
- if (!(array_p && TYPE_VEC_NEW_USES_COOKIE (elt_type)))
- {
- cookie_size = NULL_TREE;
- /* No size arithmetic necessary, so the size check is
- not needed. */
- if (outer_nelts_check != NULL && inner_size == 1)
- outer_nelts_check = NULL_TREE;
- }
- alloc_call = build_operator_new_call (fnname, placement,
- &size, &cookie_size,
- outer_nelts_check,
- &alloc_fn, complain);
- }
+ alloc_call = build_operator_new_call (fnname, placement,
+ &size, &cookie_size,
+ align_arg, outer_nelts_check,
+ &alloc_fn, complain);
}
if (alloc_call == error_mark_node)
gcc_assert (alloc_fn != NULL_TREE);
+ /* Now, check to see if this function is actually a placement
+ allocation function. This can happen even when PLACEMENT is NULL
+ because we might have something like:
+
+ struct S { void* operator new (size_t, int i = 0); };
+
+ A call to `new S' will get this allocation function, even though
+ there is no explicit placement argument. If there is more than
+ one argument, or there are variable arguments, then this is a
+ placement allocation function. */
+ placement_allocation_fn_p
+ = (type_num_arguments (TREE_TYPE (alloc_fn)) > 1
+ || varargs_function_p (alloc_fn));
+
+ if (warn_aligned_new
+ && !placement_allocation_fn_p
+ && TYPE_ALIGN (elt_type) > malloc_alignment ()
+ && (warn_aligned_new > 1
+ || CP_DECL_CONTEXT (alloc_fn) == global_namespace)
+ && !aligned_allocation_fn_p (alloc_fn))
+ {
+ if (warning (OPT_Waligned_new_, "%<new%> of type %qT with extended "
+ "alignment %d", elt_type, TYPE_ALIGN_UNIT (elt_type)))
+ {
+ inform (input_location, "uses %qD, which does not have an alignment "
+ "parameter", alloc_fn);
+ if (!aligned_new_threshold)
+ inform (input_location, "use %<-faligned-new%> to enable C++17 "
+ "over-aligned new support");
+ }
+ }
+
/* If we found a simple case of PLACEMENT_EXPR above, then copy it
into a temporary variable. */
if (!processing_template_decl
{
placement_expr = get_target_expr (placement_first);
CALL_EXPR_ARG (alloc_call, 1)
- = convert (TREE_TYPE (placement), placement_expr);
+ = fold_convert (TREE_TYPE (placement), placement_expr);
}
if (!member_new_p
while (TREE_CODE (alloc_call) == COMPOUND_EXPR)
alloc_call = TREE_OPERAND (alloc_call, 1);
- /* Now, check to see if this function is actually a placement
- allocation function. This can happen even when PLACEMENT is NULL
- because we might have something like:
-
- struct S { void* operator new (size_t, int i = 0); };
-
- A call to `new S' will get this allocation function, even though
- there is no explicit placement argument. If there is more than
- one argument, or there are variable arguments, then this is a
- placement allocation function. */
- placement_allocation_fn_p
- = (type_num_arguments (TREE_TYPE (alloc_fn)) > 1
- || varargs_function_p (alloc_fn));
-
/* Preevaluate the placement args so that we don't reevaluate them for a
placement delete. */
if (placement_allocation_fn_p)
So check for a null exception spec on the op new we just called. */
nothrow = TYPE_NOTHROW_P (TREE_TYPE (alloc_fn));
- check_new = (flag_check_new || nothrow) && ! use_java_new;
+ check_new
+ = flag_check_new || (nothrow && !std_placement_new_fn_p (alloc_fn));
if (cookie_size)
{
alloc_node, cookie_ptr);
size_ptr_type = build_pointer_type (sizetype);
cookie_ptr = fold_convert (size_ptr_type, cookie_ptr);
- cookie = cp_build_indirect_ref (cookie_ptr, RO_NULL, complain);
+ cookie = cp_build_fold_indirect_ref (cookie_ptr);
cookie_expr = build2 (MODIFY_EXPR, sizetype, cookie, nelts);
NEGATE_EXPR, sizetype,
size_in_bytes (sizetype)));
- cookie = cp_build_indirect_ref (cookie_ptr, RO_NULL, complain);
+ cookie = cp_build_fold_indirect_ref (cookie_ptr);
cookie = build2 (MODIFY_EXPR, sizetype, cookie,
size_in_bytes (elt_type));
cookie_expr = build2 (COMPOUND_EXPR, TREE_TYPE (cookie_expr),
the initializer anyway since we're going to throw it away and
rebuild it at instantiation time, so just build up a single
constructor call to get any appropriate diagnostics. */
- init_expr = cp_build_indirect_ref (data_addr, RO_NULL, complain);
+ init_expr = cp_build_fold_indirect_ref (data_addr);
if (type_build_ctor_call (elt_type))
init_expr = build_special_member_call (init_expr,
complete_ctor_identifier,
}
else
{
- init_expr = cp_build_indirect_ref (data_addr, RO_NULL, complain);
+ init_expr = cp_build_fold_indirect_ref (data_addr);
if (type_build_ctor_call (type) && !explicit_value_init_p)
{
}
else if (explicit_value_init_p)
{
- /* Something like `new int()'. */
- tree val = build_value_init (type, complain);
+ /* Something like `new int()'. NO_CLEANUP is needed so
+ we don't try and build a (possibly ill-formed)
+ destructor. */
+ tree val = build_value_init (type, complain | tf_no_cleanup);
if (val == error_mark_node)
return error_mark_node;
init_expr = build2 (INIT_EXPR, type, init_expr, val);
ie = build_x_compound_expr_from_vec (*init, "new initializer",
complain);
- init_expr = cp_build_modify_expr (init_expr, INIT_EXPR, ie,
- complain);
+ init_expr = cp_build_modify_expr (input_location, init_expr,
+ INIT_EXPR, ie, complain);
}
- stable = stabilize_init (init_expr, &init_preeval_expr);
+ /* If the initializer uses C++14 aggregate NSDMI that refer to the
+ object being initialized, replace them now and don't try to
+ preevaluate. */
+ bool had_placeholder = false;
+ if (!processing_template_decl
+ && TREE_CODE (init_expr) == INIT_EXPR)
+ TREE_OPERAND (init_expr, 1)
+ = replace_placeholders (TREE_OPERAND (init_expr, 1),
+ TREE_OPERAND (init_expr, 0),
+ &had_placeholder);
+ stable = (!had_placeholder
+ && stabilize_init (init_expr, &init_preeval_expr));
}
if (init_expr == error_mark_node)
unambiguous matching deallocation function can be found,
propagating the exception does not cause the object's memory to be
freed. */
- if (flag_exceptions && ! use_java_new)
+ if (flag_exceptions)
{
enum tree_code dcode = array_p ? VEC_DELETE_EXPR : DELETE_EXPR;
tree cleanup;
rval = build2 (COMPOUND_EXPR, TREE_TYPE (rval), init_preeval_expr, rval);
/* A new-expression is never an lvalue. */
- gcc_assert (!lvalue_p (rval));
+ gcc_assert (!obvalue_p (rval));
return convert (pointer_type, rval);
}
if (type == error_mark_node)
return error_mark_node;
- if (nelts == NULL_TREE && vec_safe_length (*init) == 1
+ if (nelts == NULL_TREE
/* Don't do auto deduction where it might affect mangling. */
&& (!processing_template_decl || at_function_scope_p ()))
{
tree auto_node = type_uses_auto (type);
if (auto_node)
{
- tree d_init = (**init)[0];
- d_init = resolve_nondeduced_context (d_init);
- type = do_auto_deduction (type, d_init, auto_node);
+ tree d_init = NULL_TREE;
+ if (vec_safe_length (*init) == 1)
+ {
+ d_init = (**init)[0];
+ d_init = resolve_nondeduced_context (d_init, complain);
+ }
+ type = do_auto_deduction (type, d_init, auto_node, complain);
}
}
orig_placement = make_tree_vector_copy (*placement);
orig_nelts = nelts;
if (*init)
- orig_init = make_tree_vector_copy (*init);
+ {
+ orig_init = make_tree_vector_copy (*init);
+ /* Also copy any CONSTRUCTORs in *init, since reshape_init and
+ digest_init clobber them in place. */
+ for (unsigned i = 0; i < orig_init->length(); ++i)
+ {
+ tree e = (**init)[i];
+ if (TREE_CODE (e) == CONSTRUCTOR)
+ (**init)[i] = copy_node (e);
+ }
+ }
make_args_non_dependent (*placement);
if (nelts)
else
return error_mark_node;
}
+
+ /* Try to determine the constant value only for the purposes
+ of the diagnostic below but continue to use the original
+ value and handle const folding later. */
+ const_tree cst_nelts = maybe_constant_value (nelts);
+
+ /* The expression in a noptr-new-declarator is erroneous if it's of
+ non-class type and its value before converting to std::size_t is
+ less than zero. ... If the expression is a constant expression,
+ the program is ill-formed. */
+ if (INTEGER_CST == TREE_CODE (cst_nelts)
+ && tree_int_cst_sgn (cst_nelts) == -1)
+ {
+ if (complain & tf_error)
+ error ("size of array is negative");
+ return error_mark_node;
+ }
+
nelts = mark_rvalue_use (nelts);
nelts = cp_save_expr (cp_convert (sizetype, nelts, complain));
}
return rval;
}
-
-/* Given a Java class, return a decl for the corresponding java.lang.Class. */
-
-tree
-build_java_class_ref (tree type)
-{
- tree name = NULL_TREE, class_decl;
- static tree CL_suffix = NULL_TREE;
- if (CL_suffix == NULL_TREE)
- CL_suffix = get_identifier("class$");
- if (jclass_node == NULL_TREE)
- {
- jclass_node = IDENTIFIER_GLOBAL_VALUE (get_identifier ("jclass"));
- if (jclass_node == NULL_TREE)
- {
- error ("call to Java constructor, while %<jclass%> undefined");
- return error_mark_node;
- }
- jclass_node = TREE_TYPE (jclass_node);
- }
-
- /* Mangle the class$ field. */
- {
- tree field;
- for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
- if (DECL_NAME (field) == CL_suffix)
- {
- mangle_decl (field);
- name = DECL_ASSEMBLER_NAME (field);
- break;
- }
- if (!field)
- {
- error ("can%'t find %<class$%> in %qT", type);
- return error_mark_node;
- }
- }
-
- class_decl = IDENTIFIER_GLOBAL_VALUE (name);
- if (class_decl == NULL_TREE)
- {
- class_decl = build_decl (input_location,
- VAR_DECL, name, TREE_TYPE (jclass_node));
- TREE_STATIC (class_decl) = 1;
- DECL_EXTERNAL (class_decl) = 1;
- TREE_PUBLIC (class_decl) = 1;
- DECL_ARTIFICIAL (class_decl) = 1;
- DECL_IGNORED_P (class_decl) = 1;
- pushdecl_top_level (class_decl);
- make_decl_rtl (class_decl);
- }
- return class_decl;
-}
\f
static tree
build_vec_delete_1 (tree base, tree maxindex, tree type,
/* The below is short by the cookie size. */
virtual_size = size_binop (MULT_EXPR, size_exp,
- convert (sizetype, maxindex));
+ fold_convert (sizetype, maxindex));
tbase = create_temporary_var (ptype);
tbase_init
- = cp_build_modify_expr (tbase, NOP_EXPR,
+ = cp_build_modify_expr (input_location, tbase, NOP_EXPR,
fold_build_pointer_plus_loc (input_location,
fold_convert (ptype,
base),
fold_convert (ptype, base)));
tmp = fold_build1_loc (input_location, NEGATE_EXPR, sizetype, size_exp);
tmp = fold_build_pointer_plus (tbase, tmp);
- tmp = cp_build_modify_expr (tbase, NOP_EXPR, tmp, complain);
+ tmp = cp_build_modify_expr (input_location, tbase, NOP_EXPR, tmp, complain);
if (tmp == error_mark_node)
return error_mark_node;
body = build_compound_expr (input_location, body, tmp);
/* The below is short by the cookie size. */
virtual_size = size_binop (MULT_EXPR, size_exp,
- convert (sizetype, maxindex));
+ fold_convert (sizetype, maxindex));
if (! TYPE_VEC_NEW_USES_COOKIE (type))
/* no header */
else if (!body)
body = deallocate_expr;
else
- body = build_compound_expr (input_location, body, deallocate_expr);
+ /* The delete operator must be called, even if a destructor
+ throws. */
+ body = build2 (TRY_FINALLY_EXPR, void_type_node, body, deallocate_expr);
if (!body)
body = integer_zero_node;
/* Outermost wrapper: If pointer is null, punt. */
- body = fold_build3_loc (input_location, COND_EXPR, void_type_node,
- fold_build2_loc (input_location,
- NE_EXPR, boolean_type_node, base,
- convert (TREE_TYPE (base),
- nullptr_node)),
- body, integer_zero_node);
+ tree cond = build2_loc (input_location, NE_EXPR, boolean_type_node, base,
+ fold_convert (TREE_TYPE (base), nullptr_node));
+ /* This is a compiler generated comparison, don't emit
+ e.g. -Wnonnull-compare warning for it. */
+ TREE_NO_WARNING (cond) = 1;
+ body = build3_loc (input_location, COND_EXPR, void_type_node,
+ cond, body, integer_zero_node);
+ COND_EXPR_IS_VEC_DELETE (body) = true;
body = build1 (NOP_EXPR, void_type_node, body);
if (controller)
decl = create_temporary_var (type);
add_decl_expr (decl);
- finish_expr_stmt (cp_build_modify_expr (decl, INIT_EXPR, init,
- tf_warning_or_error));
+ finish_expr_stmt (cp_build_modify_expr (input_location, decl, INIT_EXPR,
+ init, tf_warning_or_error));
return decl;
}
vec_copy_assign_is_trivial (tree inner_elt_type, tree init)
{
tree fromtype = inner_elt_type;
- if (real_lvalue_p (init))
+ if (lvalue_p (init))
fromtype = cp_build_reference_type (fromtype, /*rval*/false);
return is_trivially_xible (MODIFY_EXPR, inner_elt_type, fromtype);
}
+/* Subroutine of build_vec_init: Check that the array has at least N
+ elements. Other parameters are local variables in build_vec_init.
+ ATYPE is the array (or pointer, for array new) type being initialized,
+ ITERATOR counts remaining elements, and OBASE is the base address used
+ for the ubsan bounds instrumentation. */
+
+void
+finish_length_check (tree atype, tree iterator, tree obase, unsigned n)
+{
+ tree nelts = build_int_cst (ptrdiff_type_node, n - 1);
+ if (TREE_CODE (atype) != ARRAY_TYPE)
+ {
+ /* Array new: emit a runtime check that throws
+ std::bad_array_new_length if fewer than N elements remain. */
+ if (flag_exceptions)
+ {
+ tree c = fold_build2 (LT_EXPR, boolean_type_node, iterator,
+ nelts);
+ c = build3 (COND_EXPR, void_type_node, c,
+ throw_bad_array_new_length (), void_node);
+ finish_expr_stmt (c);
+ }
+ /* Don't check an array new when -fno-exceptions. */
+ }
+ else if (sanitize_flags_p (SANITIZE_BOUNDS)
+ && current_function_decl != NULL_TREE)
+ {
+ /* Make sure the last element of the initializer is in bounds. */
+ finish_expr_stmt
+ (ubsan_instrument_bounds
+ (input_location, obase, &nelts, /*ignore_off_by_one*/false));
+ }
+}
+
/* `build_vec_init' returns tree structure that performs
initialization of a vector of aggregate types.
tree obase = base;
bool xvalue = false;
bool errors = false;
+ location_t loc = (init ? EXPR_LOC_OR_LOC (init, input_location)
+ : location_of (base));
if (TREE_CODE (atype) == ARRAY_TYPE && TYPE_DOMAIN (atype))
maxindex = array_type_nelts (atype);
if (maxindex == NULL_TREE || maxindex == error_mark_node)
return error_mark_node;
+ maxindex = maybe_constant_value (maxindex);
if (explicit_value_init_p)
gcc_assert (!init);
&& from_array != 2)
init = TARGET_EXPR_INITIAL (init);
- /* If we have a braced-init-list, make sure that the array
+ bool direct_init = false;
+ if (from_array && init && BRACE_ENCLOSED_INITIALIZER_P (init)
+ && CONSTRUCTOR_NELTS (init) == 1)
+ {
+ tree elt = CONSTRUCTOR_ELT (init, 0)->value;
+ if (TREE_CODE (TREE_TYPE (elt)) == ARRAY_TYPE)
+ {
+ direct_init = DIRECT_LIST_INIT_P (init);
+ init = elt;
+ }
+ }
+
+ /* If we have a braced-init-list or string constant, make sure that the array
is big enough for all the initializers. */
- bool length_check = (init && TREE_CODE (init) == CONSTRUCTOR
- && CONSTRUCTOR_NELTS (init) > 0
+ bool length_check = (init
+ && (TREE_CODE (init) == STRING_CST
+ || (TREE_CODE (init) == CONSTRUCTOR
+ && CONSTRUCTOR_NELTS (init) > 0))
&& !TREE_CONSTANT (maxindex));
if (init
? vec_copy_assign_is_trivial (inner_elt_type, init)
: !TYPE_NEEDS_CONSTRUCTING (type))
&& ((TREE_CODE (init) == CONSTRUCTOR
+ && (BRACE_ENCLOSED_INITIALIZER_P (init)
+ || (same_type_ignoring_top_level_qualifiers_p
+ (atype, TREE_TYPE (init))))
/* Don't do this if the CONSTRUCTOR might contain something
that might throw and require us to clean up. */
&& (vec_safe_is_empty (CONSTRUCTOR_ELTS (init))
}
maxindex = cp_convert (ptrdiff_type_node, maxindex, complain);
+ maxindex = fold_simple (maxindex);
+
if (TREE_CODE (atype) == ARRAY_TYPE)
{
ptype = build_pointer_type (type);
from_array = 0;
if (length_check)
- {
- tree nelts = build_int_cst (ptrdiff_type_node,
- CONSTRUCTOR_NELTS (init) - 1);
- if (TREE_CODE (atype) != ARRAY_TYPE)
- {
- if (flag_exceptions)
- {
- tree c = fold_build2 (LT_EXPR, boolean_type_node, iterator,
- nelts);
- c = build3 (COND_EXPR, void_type_node, c,
- throw_bad_array_new_length (), void_node);
- finish_expr_stmt (c);
- }
- /* Don't check an array new when -fno-exceptions. */
- }
- else if (flag_sanitize & SANITIZE_BOUNDS
- && do_ubsan_in_current_function ())
- {
- /* Make sure the last element of the initializer is in bounds. */
- finish_expr_stmt
- (ubsan_instrument_bounds
- (input_location, obase, &nelts, /*ignore_off_by_one*/false));
- }
- }
+ finish_length_check (atype, iterator, obase, CONSTRUCTOR_NELTS (init));
if (try_const)
vec_alloc (const_vec, CONSTRUCTOR_NELTS (init));
else if (MAYBE_CLASS_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
one_init = build_aggr_init (baseref, elt, 0, complain);
else
- one_init = cp_build_modify_expr (baseref, NOP_EXPR,
- elt, complain);
+ one_init = cp_build_modify_expr (input_location, baseref,
+ NOP_EXPR, elt, complain);
if (one_init == error_mark_node)
errors = true;
if (try_const)
finish_expr_stmt (one_init);
current_stmt_tree ()->stmts_are_full_exprs_p = 0;
- one_init = cp_build_unary_op (PREINCREMENT_EXPR, base, 0, complain);
+ one_init = cp_build_unary_op (PREINCREMENT_EXPR, base, false,
+ complain);
if (one_init == error_mark_node)
errors = true;
else
finish_expr_stmt (one_init);
- one_init = cp_build_unary_op (PREDECREMENT_EXPR, iterator, 0,
+ one_init = cp_build_unary_op (PREDECREMENT_EXPR, iterator, false,
complain);
if (one_init == error_mark_node)
errors = true;
/* Any elements without explicit initializers get T{}. */
empty_list = true;
}
+ else if (init && TREE_CODE (init) == STRING_CST)
+ {
+ /* Check that the array is at least as long as the string. */
+ if (length_check)
+ finish_length_check (atype, iterator, obase,
+ TREE_STRING_LENGTH (init));
+ tree length = build_int_cst (ptrdiff_type_node,
+ TREE_STRING_LENGTH (init));
+
+ /* Copy the string to the first part of the array. */
+ tree alias_set = build_int_cst (build_pointer_type (type), 0);
+ tree lhs = build2 (MEM_REF, TREE_TYPE (init), base, alias_set);
+ tree stmt = build2 (MODIFY_EXPR, void_type_node, lhs, init);
+ finish_expr_stmt (stmt);
+
+ /* Adjust the counter and pointer. */
+ stmt = cp_build_binary_op (loc, MINUS_EXPR, iterator, length, complain);
+ stmt = build2 (MODIFY_EXPR, void_type_node, iterator, stmt);
+ finish_expr_stmt (stmt);
+
+ stmt = cp_build_binary_op (loc, PLUS_EXPR, base, length, complain);
+ stmt = build2 (MODIFY_EXPR, void_type_node, base, stmt);
+ finish_expr_stmt (stmt);
+
+ /* And set the rest of the array to NUL. */
+ from_array = 0;
+ explicit_value_init_p = true;
+ }
else if (from_array)
{
if (init)
&& (num_initialized_elts
== tree_to_shwi (maxindex) + 1))))
{
- /* If the ITERATOR is equal to -1, then we don't have to loop;
+ /* If the ITERATOR is less than or equal to -1, then we don't have to loop;
we've already initialized all the elements. */
tree for_stmt;
tree elt_init;
tree to;
for_stmt = begin_for_stmt (NULL_TREE, NULL_TREE);
- finish_for_init_stmt (for_stmt);
- finish_for_cond (build2 (NE_EXPR, boolean_type_node, iterator,
+ finish_init_stmt (for_stmt);
+ finish_for_cond (build2 (GT_EXPR, boolean_type_node, iterator,
build_int_cst (TREE_TYPE (iterator), -1)),
- for_stmt, false);
- elt_init = cp_build_unary_op (PREDECREMENT_EXPR, iterator, 0,
+ for_stmt, false, 0);
+ elt_init = cp_build_unary_op (PREDECREMENT_EXPR, iterator, false,
complain);
if (elt_init == error_mark_node)
errors = true;
from = build1 (INDIRECT_REF, itype, base2);
if (xvalue)
from = move (from);
+ if (direct_init)
+ from = build_tree_list (NULL_TREE, from);
}
else
from = NULL_TREE;
if (from_array == 2)
- elt_init = cp_build_modify_expr (to, NOP_EXPR, from,
- complain);
+ elt_init = cp_build_modify_expr (input_location, to, NOP_EXPR,
+ from, complain);
else if (type_build_ctor_call (type))
elt_init = build_aggr_init (to, from, 0, complain);
else if (from)
- elt_init = cp_build_modify_expr (to, NOP_EXPR, from,
+ elt_init = cp_build_modify_expr (input_location, to, NOP_EXPR, from,
complain);
else
gcc_unreachable ();
if (TREE_CODE (init) == TREE_LIST)
init = build_x_compound_expr_from_list (init, ELK_INIT,
complain);
- elt_init = build2 (INIT_EXPR, type, to, init);
+ elt_init = (init == error_mark_node
+ ? error_mark_node
+ : build2 (INIT_EXPR, type, to, init));
}
}
finish_expr_stmt (elt_init);
current_stmt_tree ()->stmts_are_full_exprs_p = 0;
- finish_expr_stmt (cp_build_unary_op (PREINCREMENT_EXPR, base, 0,
+ finish_expr_stmt (cp_build_unary_op (PREINCREMENT_EXPR, base, false,
complain));
if (base2)
- finish_expr_stmt (cp_build_unary_op (PREINCREMENT_EXPR, base2, 0,
+ finish_expr_stmt (cp_build_unary_op (PREINCREMENT_EXPR, base2, false,
complain));
finish_for_stmt (for_stmt);
{
atype = build_pointer_type (atype);
stmt_expr = build1 (NOP_EXPR, atype, stmt_expr);
- stmt_expr = cp_build_indirect_ref (stmt_expr, RO_NULL, complain);
+ stmt_expr = cp_build_fold_indirect_ref (stmt_expr);
TREE_NO_WARNING (stmt_expr) = 1;
}
&& MAYBE_CLASS_TYPE_P (type) && !CLASSTYPE_FINAL (type)
&& TYPE_POLYMORPHIC_P (type))
{
- tree dtor;
- dtor = CLASSTYPE_DESTRUCTORS (type);
+ tree dtor = CLASSTYPE_DESTRUCTOR (type);
if (!dtor || !DECL_VINDEX (dtor))
{
if (CLASSTYPE_PURE_VIRTUALS (type))
warning (OPT_Wdelete_non_virtual_dtor,
"deleting object of abstract class type %qT"
" which has non-virtual destructor"
- " will cause undefined behaviour", type);
+ " will cause undefined behavior", type);
else
warning (OPT_Wdelete_non_virtual_dtor,
"deleting object of polymorphic class type %qT"
" which has non-virtual destructor"
- " might cause undefined behaviour", type);
+ " might cause undefined behavior", type);
}
}
}
/* Make sure the destructor is callable. */
if (type_build_dtor_call (type))
{
- expr = build_dtor_call (cp_build_indirect_ref (addr, RO_NULL,
- complain),
+ expr = build_dtor_call (cp_build_fold_indirect_ref (addr),
sfk_complete_destructor, flags, complain);
if (expr == error_mark_node)
return error_mark_node;
/* If the destructor is non-virtual, there is no deleting
variant. Instead, we must explicitly call the appropriate
`operator delete' here. */
- else if (!DECL_VIRTUAL_P (CLASSTYPE_DESTRUCTORS (type))
+ else if (!DECL_VIRTUAL_P (CLASSTYPE_DESTRUCTOR (type))
&& auto_delete == sfk_deleting_destructor)
{
/* We will use ADDR multiple times so we must save it. */
complain);
}
- expr = build_dtor_call (cp_build_indirect_ref (addr, RO_NULL, complain),
+ expr = build_dtor_call (cp_build_fold_indirect_ref (addr),
auto_delete, flags, complain);
if (expr == error_mark_node)
return error_mark_node;
if (do_delete)
- expr = build2 (COMPOUND_EXPR, void_type_node, expr, do_delete);
+ /* The delete operator must be called, regardless of whether
+ the destructor throws.
+
+ [expr.delete]/7 The deallocation function is called
+ regardless of whether the destructor for the object or some
+ element of the array throws an exception. */
+ expr = build2 (TRY_FINALLY_EXPR, void_type_node, expr, do_delete);
/* We need to calculate this before the dtor changes the vptr. */
if (head)
else
{
/* Handle deleting a null pointer. */
- ifexp = fold (cp_build_binary_op (input_location,
- NE_EXPR, addr, nullptr_node,
- complain));
+ warning_sentinel s (warn_address);
+ ifexp = cp_build_binary_op (input_location, NE_EXPR, addr,
+ nullptr_node, complain);
if (ifexp == error_mark_node)
return error_mark_node;
+ /* This is a compiler generated comparison, don't emit
+ e.g. -Wnonnull-compare warning for it. */
+ else if (TREE_CODE (ifexp) == NE_EXPR)
+ TREE_NO_WARNING (ifexp) = 1;
}
if (ifexp != integer_one_node)
vec<tree, va_gc> *vbases;
/* Run destructors for all virtual baseclasses. */
- if (CLASSTYPE_VBASECLASSES (current_class_type))
+ if (!ABSTRACT_CLASS_TYPE_P (current_class_type)
+ && CLASSTYPE_VBASECLASSES (current_class_type))
{
tree cond = (condition_conversion
(build2 (BIT_AND_EXPR, integer_type_node,
sizetype, TYPE_SIZE_UNIT (sizetype));
cookie_addr = fold_build_pointer_plus (fold_convert (size_ptr_type, base),
cookie_addr);
- maxindex = cp_build_indirect_ref (cookie_addr, RO_NULL, complain);
+ maxindex = cp_build_fold_indirect_ref (cookie_addr);
}
else if (TREE_CODE (type) == ARRAY_TYPE)
{
return rval;
}
+
+#include "gt-cp-init.h"