/* Handle initialization things in C++.
- Copyright (C) 1987-2017 Free Software Foundation, Inc.
+ Copyright (C) 1987-2018 Free Software Foundation, Inc.
Contributed by Michael Tiemann (tiemann@cygnus.com)
This file is part of GCC.
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "intl.h"
+#include "stringpool.h"
+#include "attribs.h"
+#include "asan.h"
static bool begin_init_stmts (tree *, tree *);
static tree finish_init_stmts (bool, tree, tree);
static tree build_field_list (tree, tree, int *);
static int diagnose_uninitialized_cst_or_ref_member_1 (tree, tree, bool, bool);
+static GTY(()) tree fn;
+
/* We are about to generate some complex initialization code.
Conceptually, it is all a single expression. However, we may want
to include conditionals, loops, and other such statement-level
else if (VECTOR_TYPE_P (type))
init = build_zero_cst (type);
else
- gcc_assert (TREE_CODE (type) == REFERENCE_TYPE);
+ {
+ gcc_assert (TREE_CODE (type) == REFERENCE_TYPE);
+ init = build_zero_cst (type);
+ }
/* In all cases, the initializer is a constant. */
if (init)
/* Return the non-static data initializer for FIELD_DECL MEMBER. */
+static GTY((cache)) tree_cache_map *nsdmi_inst;
+
tree
-get_nsdmi (tree member, bool in_ctor)
+get_nsdmi (tree member, bool in_ctor, tsubst_flags_t complain)
{
tree init;
tree save_ccp = current_class_ptr;
tree save_ccr = current_class_ref;
- if (!in_ctor)
- {
- /* Use a PLACEHOLDER_EXPR when we don't have a 'this' parameter to
- refer to; constexpr evaluation knows what to do with it. */
- current_class_ref = build0 (PLACEHOLDER_EXPR, DECL_CONTEXT (member));
- current_class_ptr = build_address (current_class_ref);
- }
-
if (DECL_LANG_SPECIFIC (member) && DECL_TEMPLATE_INFO (member))
{
init = DECL_INITIAL (DECL_TI_TEMPLATE (member));
+ location_t expr_loc
+ = EXPR_LOC_OR_LOC (init, DECL_SOURCE_LOCATION (member));
+ tree *slot;
if (TREE_CODE (init) == DEFAULT_ARG)
- goto unparsed;
-
+ /* Unparsed. */;
+ else if (nsdmi_inst && (slot = nsdmi_inst->get (member)))
+ init = *slot;
/* Check recursive instantiation. */
- if (DECL_INSTANTIATING_NSDMI_P (member))
+ else if (DECL_INSTANTIATING_NSDMI_P (member))
{
- error ("recursive instantiation of non-static data member "
- "initializer for %qD", member);
+ if (complain & tf_error)
+ error_at (expr_loc, "recursive instantiation of default member "
+ "initializer for %qD", member);
init = error_mark_node;
}
else
{
+ int un = cp_unevaluated_operand;
+ cp_unevaluated_operand = 0;
+
+ location_t sloc = input_location;
+ input_location = expr_loc;
+
DECL_INSTANTIATING_NSDMI_P (member) = 1;
-
+
+ inject_this_parameter (DECL_CONTEXT (member), TYPE_UNQUALIFIED);
+
+ start_lambda_scope (member);
+
/* Do deferred instantiation of the NSDMI. */
init = (tsubst_copy_and_build
(init, DECL_TI_ARGS (member),
- tf_warning_or_error, member, /*function_p=*/false,
+ complain, member, /*function_p=*/false,
/*integral_constant_expression_p=*/false));
- init = digest_nsdmi_init (member, init);
-
+ init = digest_nsdmi_init (member, init, complain);
+
+ finish_lambda_scope ();
+
DECL_INSTANTIATING_NSDMI_P (member) = 0;
+
+ if (init != error_mark_node)
+ {
+ if (!nsdmi_inst)
+ nsdmi_inst = tree_cache_map::create_ggc (37);
+ nsdmi_inst->put (member, init);
+ }
+
+ input_location = sloc;
+ cp_unevaluated_operand = un;
}
}
else
+ init = DECL_INITIAL (member);
+
+ if (init && TREE_CODE (init) == DEFAULT_ARG)
{
- init = DECL_INITIAL (member);
- if (init && TREE_CODE (init) == DEFAULT_ARG)
+ if (complain & tf_error)
{
- unparsed:
- error ("constructor required before non-static data member "
- "for %qD has been parsed", member);
+ error ("default member initializer for %qD required before the end "
+ "of its enclosing class", member);
+ inform (location_of (init), "defined here");
DECL_INITIAL (member) = error_mark_node;
- init = error_mark_node;
}
- /* Strip redundant TARGET_EXPR so we don't need to remap it, and
- so the aggregate init code below will see a CONSTRUCTOR. */
- bool simple_target = (init && SIMPLE_TARGET_EXPR_P (init));
- if (simple_target)
- init = TARGET_EXPR_INITIAL (init);
- init = break_out_target_exprs (init);
- if (simple_target && TREE_CODE (init) != CONSTRUCTOR)
- /* Now put it back so C++17 copy elision works. */
- init = get_target_expr (init);
+ init = error_mark_node;
+ }
+
+ if (in_ctor)
+ {
+ current_class_ptr = save_ccp;
+ current_class_ref = save_ccr;
+ }
+ else
+ {
+ /* Use a PLACEHOLDER_EXPR when we don't have a 'this' parameter to
+ refer to; constexpr evaluation knows what to do with it. */
+ current_class_ref = build0 (PLACEHOLDER_EXPR, DECL_CONTEXT (member));
+ current_class_ptr = build_address (current_class_ref);
}
+
+ /* Strip redundant TARGET_EXPR so we don't need to remap it, and
+ so the aggregate init code below will see a CONSTRUCTOR. */
+ bool simple_target = (init && SIMPLE_TARGET_EXPR_P (init));
+ if (simple_target)
+ init = TARGET_EXPR_INITIAL (init);
+ init = break_out_target_exprs (init);
+ if (simple_target && TREE_CODE (init) != CONSTRUCTOR)
+ /* Now put it back so C++17 copy elision works. */
+ init = get_target_expr (init);
+
current_class_ptr = save_ccp;
current_class_ref = save_ccr;
return init;
/* Use the non-static data member initializer if there was no
mem-initializer for this field. */
if (init == NULL_TREE)
- init = get_nsdmi (member, /*ctor*/true);
+ init = get_nsdmi (member, /*ctor*/true, tf_warning_or_error);
if (init == error_mark_node)
return;
base_addr = build_base_path (PLUS_EXPR, current_class_ptr,
subobject, 1, tf_warning_or_error);
expand_aggr_init_1 (subobject, NULL_TREE,
- cp_build_indirect_ref (base_addr, RO_NULL,
- tf_warning_or_error),
+ cp_build_fold_indirect_ref (base_addr),
arguments,
flags,
tf_warning_or_error);
/* Compute the value to use, when there's a VTT. */
vtt_parm = current_vtt_parm;
vtbl2 = fold_build_pointer_plus (vtt_parm, vtt_index);
- vtbl2 = cp_build_indirect_ref (vtbl2, RO_NULL, tf_warning_or_error);
+ vtbl2 = cp_build_fold_indirect_ref (vtbl2);
vtbl2 = convert (TREE_TYPE (vtbl), vtbl2);
/* The actual initializer is the VTT value only in the subobject
}
/* Compute the location of the vtpr. */
- vtbl_ptr = build_vfield_ref (cp_build_indirect_ref (decl, RO_NULL,
- tf_warning_or_error),
+ vtbl_ptr = build_vfield_ref (cp_build_fold_indirect_ref (decl),
TREE_TYPE (binfo));
gcc_assert (vtbl_ptr != error_mark_node);
if (init == error_mark_node)
return error_mark_node;
+ location_t init_loc = (init
+ ? EXPR_LOC_OR_LOC (init, input_location)
+ : location_of (exp));
+
TREE_READONLY (exp) = 0;
TREE_THIS_VOLATILE (exp) = 0;
TREE_TYPE (init) = cv_unqualified (itype);
from_array = (itype && same_type_p (TREE_TYPE (init),
TREE_TYPE (exp)));
+
+ if (init && !from_array
+ && !BRACE_ENCLOSED_INITIALIZER_P (init))
+ {
+ if (complain & tf_error)
+ permerror (init_loc, "array must be initialized "
+ "with a brace-enclosed initializer");
+ else
+ return error_mark_node;
+ }
}
stmt_expr = build_vec_init (exp, NULL_TREE, init,
tsubst_flags_t complain)
{
tree type = TREE_TYPE (exp);
- tree ctor_name;
/* It fails because there may not be a constructor which takes
its own type as the first (or only parameter), but which does
}
else
{
- if (true_exp == exp)
- ctor_name = complete_ctor_identifier;
- else
- ctor_name = base_ctor_identifier;
+ tree ctor_name = (true_exp == exp
+ ? complete_ctor_identifier : base_ctor_identifier);
+
rval = build_special_member_call (exp, ctor_name, &parms, binfo, flags,
complain);
}
if (TREE_CODE (t) != TEMPLATE_ID_EXPR && !really_overloaded_fn (t))
{
/* Get rid of a potential OVERLOAD around it. */
- t = OVL_CURRENT (t);
+ t = OVL_FIRST (t);
/* Unique functions are handled easily. */
initializer for the static data member is not processed
until needed; we need it now. */
mark_used (decl, tf_none);
- mark_rvalue_use (decl);
init = DECL_INITIAL (decl);
if (init == error_mark_node)
{
else if (init->is_empty ())
init_list = void_node;
else
- init_list = build_tree_list_vec (init);
+ {
+ init_list = build_tree_list_vec (init);
+ for (tree v = init_list; v; v = TREE_CHAIN (v))
+ if (TREE_CODE (TREE_VALUE (v)) == OVERLOAD)
+ lookup_keep (TREE_VALUE (v), true);
+ }
new_expr = build4 (NEW_EXPR, build_pointer_type (type),
build_tree_list_vec (placement), type, nelts,
tree
throw_bad_array_new_length (void)
{
-  tree fn = get_identifier ("__cxa_throw_bad_array_new_length");
-  if (!get_global_value_if_present (fn, &fn))
-    fn = push_throw_library_fn (fn, build_function_type_list (sizetype,
-							       NULL_TREE));
+  /* Build the declaration of __cxa_throw_bad_array_new_length once and
+     cache it in the file-static GTY-rooted 'fn', so later calls reuse
+     the decl instead of repeating the global-binding lookup.  */
+  if (!fn)
+    {
+      tree name = get_identifier ("__cxa_throw_bad_array_new_length");
+
+      fn = get_global_binding (name);
+      if (!fn)
+	fn = push_throw_library_fn
+	  (name, build_function_type_list (sizetype, NULL_TREE));
+    }
return build_cxx_call (fn, 0, NULL, tf_warning_or_error);
}
-/* Attempt to find the initializer for field T in the initializer INIT,
- when non-null. Returns the initializer when successful and NULL
- otherwise. */
+/* Attempt to find the initializer for flexible array field T in the
+ initializer INIT, when non-null. Returns the initializer when
+ successful and NULL otherwise. */
static tree
-find_field_init (tree t, tree init)
+find_flexarray_init (tree t, tree init)
{
- if (!init)
+ if (!init || init == error_mark_node)
return NULL_TREE;
unsigned HOST_WIDE_INT idx;
/* Iterate over all top-level initializer elements. */
FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, field, elt)
- {
- /* If the member T is found, return it. */
- if (field == t)
- return elt;
-
- /* Otherwise continue and/or recurse into nested initializers. */
- if (TREE_CODE (elt) == CONSTRUCTOR
- && (init = find_field_init (t, elt)))
- return init;
- }
+ /* If the member T is found, return it. */
+ if (field == t)
+ return elt;
+
return NULL_TREE;
}
/* The number of bytes to add to or subtract from the size of the provided
buffer based on an offset into an array or an array element reference.
- Although intermediate results may be negative (as in a[3] - 2) the final
- result cannot be. */
- HOST_WIDE_INT adjust = 0;
+ Although intermediate results may be negative (as in a[3] - 2) a valid
+ final result cannot be. */
+ offset_int adjust = 0;
/* True when the size of the entire destination object should be used
to compute the possibly optimistic estimate of the available space. */
bool use_obj_size = false;
is a constant. */
if (TREE_CODE (oper) == POINTER_PLUS_EXPR)
{
- /* If the offset is comple-time constant, use it to compute a more
+ /* If the offset is compile-time constant, use it to compute a more
accurate estimate of the size of the buffer. Since the operand
of POINTER_PLUS_EXPR is represented as an unsigned type, convert
it to signed first.
estimate (this may lead to false negatives). */
tree adj = TREE_OPERAND (oper, 1);
if (CONSTANT_CLASS_P (adj))
- adjust += tree_to_shwi (convert (ssizetype, adj));
+ adjust += wi::to_offset (convert (ssizetype, adj));
else
use_obj_size = true;
not a compile-time constant, use the index to determine the
size of the buffer. Otherwise, use the entire array as
an optimistic estimate of the size. */
- const_tree adj = TREE_OPERAND (oper, 1);
+ const_tree adj = fold_non_dependent_expr (TREE_OPERAND (oper, 1));
if (!use_obj_size && CONSTANT_CLASS_P (adj))
- adjust += tree_to_shwi (adj);
+ adjust += wi::to_offset (adj);
else
{
use_obj_size = true;
members from arrays of unspecified size. */
bool compref = TREE_CODE (oper) == COMPONENT_REF;
+  /* For COMPONENT_REF (i.e., a struct member) this is the size of the
+     entire enclosing struct.  Used to validate the adjustment (offset)
+     into an array at the end of a struct.  */
+ offset_int compsize = 0;
+
/* Descend into a struct or union to find the member whose address
is being used as the argument. */
if (TREE_CODE (oper) == COMPONENT_REF)
{
+ tree comptype = TREE_TYPE (TREE_OPERAND (oper, 0));
+ compsize = wi::to_offset (TYPE_SIZE_UNIT (comptype));
+
tree op0 = oper;
while (TREE_CODE (op0 = TREE_OPERAND (op0, 0)) == COMPONENT_REF);
if (VAR_P (op0))
oper = TREE_OPERAND (oper, 1);
}
- if ((addr_expr || !POINTER_TYPE_P (TREE_TYPE (oper)))
+ tree opertype = TREE_TYPE (oper);
+ if ((addr_expr || !POINTER_TYPE_P (opertype))
&& (VAR_P (oper)
|| TREE_CODE (oper) == FIELD_DECL
|| TREE_CODE (oper) == PARM_DECL))
{
/* A possibly optimistic estimate of the number of bytes available
in the destination buffer. */
- unsigned HOST_WIDE_INT bytes_avail = 0;
+ offset_int bytes_avail = 0;
/* True when the estimate above is in fact the exact size
of the destination buffer rather than an estimate. */
bool exact_size = true;
/* Use the size of the entire array object when the expression
refers to a variable or its size depends on an expression
that's not a compile-time constant. */
- bytes_avail = tree_to_uhwi (DECL_SIZE_UNIT (oper));
+ bytes_avail = wi::to_offset (DECL_SIZE_UNIT (oper));
exact_size = !use_obj_size;
}
- else if (TYPE_SIZE_UNIT (TREE_TYPE (oper))
- && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (oper))))
+ else if (tree opersize = TYPE_SIZE_UNIT (opertype))
{
/* Use the size of the type of the destination buffer object
- as the optimistic estimate of the available space in it. */
- bytes_avail = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (oper)));
- }
- else if (var_decl)
- {
- /* Constructing into a buffer provided by the flexible array
- member of a declared object (which is permitted as a G++
- extension). If the array member has been initialized,
- determine its size from the initializer. Otherwise,
- the array size is zero. */
- bytes_avail = 0;
-
- if (tree init = find_field_init (oper, DECL_INITIAL (var_decl)))
- bytes_avail = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (init)));
+ as the optimistic estimate of the available space in it.
+ Use the maximum possible size for zero-size arrays and
+	 flexible array members (except for those of initialized
+	 objects).  */
+ if (TREE_CODE (opersize) == INTEGER_CST)
+ bytes_avail = wi::to_offset (opersize);
}
- else
+
+ if (bytes_avail == 0)
{
- /* Bail if neither the size of the object nor its type is known. */
- return;
+ if (var_decl)
+ {
+ /* Constructing into a buffer provided by the flexible array
+ member of a declared object (which is permitted as a G++
+ extension). If the array member has been initialized,
+ determine its size from the initializer. Otherwise,
+ the array size is zero. */
+ if (tree init = find_flexarray_init (oper,
+ DECL_INITIAL (var_decl)))
+ bytes_avail = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (init)));
+ }
+ else
+ bytes_avail = (wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node))
+ - compsize);
}
- tree_code oper_code = TREE_CODE (TREE_TYPE (oper));
+ tree_code oper_code = TREE_CODE (opertype);
if (compref && oper_code == ARRAY_TYPE)
{
- /* Avoid diagnosing flexible array members (which are accepted
- as an extension and diagnosed with -Wpedantic) and zero-length
- arrays (also an extension).
- Overflowing construction in one-element arrays is diagnosed
- only at level 2. */
- if (bytes_avail == 0 && !var_decl)
- return;
-
- tree nelts = array_type_nelts_top (TREE_TYPE (oper));
+ tree nelts = array_type_nelts_top (opertype);
tree nelts_cst = maybe_constant_value (nelts);
if (TREE_CODE (nelts_cst) == INTEGER_CST
&& integer_onep (nelts_cst)
return;
}
- /* The size of the buffer can only be adjusted down but not up. */
- gcc_checking_assert (0 <= adjust);
-
/* Reduce the size of the buffer by the adjustment computed above
from the offset and/or the index into the array. */
- if (bytes_avail < static_cast<unsigned HOST_WIDE_INT>(adjust))
+ if (bytes_avail < adjust || adjust < 0)
bytes_avail = 0;
else
- bytes_avail -= adjust;
+ {
+ tree elttype = (TREE_CODE (opertype) == ARRAY_TYPE
+ ? TREE_TYPE (opertype) : opertype);
+ if (tree eltsize = TYPE_SIZE_UNIT (elttype))
+ {
+ bytes_avail -= adjust * wi::to_offset (eltsize);
+ if (bytes_avail < 0)
+ bytes_avail = 0;
+ }
+ }
/* The minimum amount of space needed for the allocation. This
is an optimistic estimate that makes it possible to detect
placement new invocation for some undersize buffers but not
others. */
- unsigned HOST_WIDE_INT bytes_need;
+ offset_int bytes_need;
if (CONSTANT_CLASS_P (size))
- bytes_need = tree_to_uhwi (size);
+ bytes_need = wi::to_offset (size);
else if (nelts && CONSTANT_CLASS_P (nelts))
- bytes_need = tree_to_uhwi (nelts)
- * tree_to_uhwi (TYPE_SIZE_UNIT (type));
+ bytes_need = (wi::to_offset (nelts)
+ * wi::to_offset (TYPE_SIZE_UNIT (type)));
else if (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
- bytes_need = tree_to_uhwi (TYPE_SIZE_UNIT (type));
+ bytes_need = wi::to_offset (TYPE_SIZE_UNIT (type));
else
{
/* The type is a VLA. */
: "placement new constructing an object of type "
"%<%T [%wu]%> and size %qwu in a region of type %qT "
"and size at most %qwu",
- type, tree_to_uhwi (nelts), bytes_need,
- TREE_TYPE (oper),
- bytes_avail);
+ type, tree_to_uhwi (nelts), bytes_need.to_uhwi (),
+ opertype, bytes_avail.to_uhwi ());
else
warning_at (loc, OPT_Wplacement_new_,
exact_size ?
: "placement new constructing an array of objects "
"of type %qT and size %qwu in a region of type %qT "
"and size at most %qwu",
- type, bytes_need, TREE_TYPE (oper),
- bytes_avail);
+ type, bytes_need.to_uhwi (), opertype,
+ bytes_avail.to_uhwi ());
else
warning_at (loc, OPT_Wplacement_new_,
exact_size ?
: "placement new constructing an object of type %qT "
"and size %qwu in a region of type %qT and size "
"at most %qwu",
- type, bytes_need, TREE_TYPE (oper),
- bytes_avail);
+ type, bytes_need.to_uhwi (), opertype,
+ bytes_avail.to_uhwi ());
}
}
}
return MAX (max_align_t_align(), MALLOC_ABI_ALIGNMENT);
}
+/* Determine whether an allocation function is a namespace-scope
+   non-replaceable placement new function. See DR 1748.
+   Returns true only for a namespace-scope ALLOC_FN whose parameter
+   list, after the leading size parameter, is exactly (void *).
+   TODO: Enable in all standard modes. */
+static bool
+std_placement_new_fn_p (tree alloc_fn)
+{
+  if (DECL_NAMESPACE_SCOPE_P (alloc_fn))
+    {
+      /* Skip the first (size) parameter and require that the rest of
+	 the parameter list is a single void* followed by the
+	 end-of-list marker.  */
+      tree first_arg = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (alloc_fn)));
+      if ((TREE_VALUE (first_arg) == ptr_type_node)
+	  && TREE_CHAIN (first_arg) == void_list_node)
+	return true;
+    }
+  return false;
+}
+
/* Generate code for a new-expression, including calling the "operator
new" function, initializing the object, and, if an exception occurs
during construction, cleaning up. The arguments are as for
tree fnname;
tree fns;
- fnname = cp_operator_id (array_p ? VEC_NEW_EXPR : NEW_EXPR);
+ fnname = ovl_op_identifier (false, array_p ? VEC_NEW_EXPR : NEW_EXPR);
member_new_p = !globally_qualified_p
&& CLASS_TYPE_P (elt_type)
|| CP_DECL_CONTEXT (alloc_fn) == global_namespace)
&& !aligned_allocation_fn_p (alloc_fn))
{
- warning (OPT_Waligned_new_, "%<new%> of type %qT with extended "
- "alignment %d", elt_type, TYPE_ALIGN_UNIT (elt_type));
- inform (input_location, "uses %qD, which does not have an alignment "
- "parameter", alloc_fn);
- if (!aligned_new_threshold)
- inform (input_location, "use %<-faligned-new%> to enable C++17 "
- "over-aligned new support");
+ if (warning (OPT_Waligned_new_, "%<new%> of type %qT with extended "
+ "alignment %d", elt_type, TYPE_ALIGN_UNIT (elt_type)))
+ {
+ inform (input_location, "uses %qD, which does not have an alignment "
+ "parameter", alloc_fn);
+ if (!aligned_new_threshold)
+ inform (input_location, "use %<-faligned-new%> to enable C++17 "
+ "over-aligned new support");
+ }
}
/* If we found a simple case of PLACEMENT_EXPR above, then copy it
So check for a null exception spec on the op new we just called. */
nothrow = TYPE_NOTHROW_P (TREE_TYPE (alloc_fn));
- check_new = (flag_check_new || nothrow);
+ check_new
+ = flag_check_new || (nothrow && !std_placement_new_fn_p (alloc_fn));
if (cookie_size)
{
alloc_node, cookie_ptr);
size_ptr_type = build_pointer_type (sizetype);
cookie_ptr = fold_convert (size_ptr_type, cookie_ptr);
- cookie = cp_build_indirect_ref (cookie_ptr, RO_NULL, complain);
+ cookie = cp_build_fold_indirect_ref (cookie_ptr);
cookie_expr = build2 (MODIFY_EXPR, sizetype, cookie, nelts);
NEGATE_EXPR, sizetype,
size_in_bytes (sizetype)));
- cookie = cp_build_indirect_ref (cookie_ptr, RO_NULL, complain);
+ cookie = cp_build_fold_indirect_ref (cookie_ptr);
cookie = build2 (MODIFY_EXPR, sizetype, cookie,
size_in_bytes (elt_type));
cookie_expr = build2 (COMPOUND_EXPR, TREE_TYPE (cookie_expr),
the initializer anyway since we're going to throw it away and
rebuild it at instantiation time, so just build up a single
constructor call to get any appropriate diagnostics. */
- init_expr = cp_build_indirect_ref (data_addr, RO_NULL, complain);
+ init_expr = cp_build_fold_indirect_ref (data_addr);
if (type_build_ctor_call (elt_type))
init_expr = build_special_member_call (init_expr,
complete_ctor_identifier,
}
else
{
- init_expr = cp_build_indirect_ref (data_addr, RO_NULL, complain);
+ init_expr = cp_build_fold_indirect_ref (data_addr);
if (type_build_ctor_call (type) && !explicit_value_init_p)
{
object being initialized, replace them now and don't try to
preevaluate. */
bool had_placeholder = false;
- if (cxx_dialect >= cxx14
- && !processing_template_decl
+ if (!processing_template_decl
&& TREE_CODE (init_expr) == INIT_EXPR)
TREE_OPERAND (init_expr, 1)
= replace_placeholders (TREE_OPERAND (init_expr, 1),
d_init = (**init)[0];
d_init = resolve_nondeduced_context (d_init, complain);
}
- type = do_auto_deduction (type, d_init, auto_node);
+ type = do_auto_deduction (type, d_init, auto_node, complain);
}
}
return is_trivially_xible (MODIFY_EXPR, inner_elt_type, fromtype);
}
+/* Subroutine of build_vec_init: Check that the array has at least N
+   elements. Other parameters are local variables in build_vec_init:
+   ATYPE is the destination type (a non-ARRAY_TYPE here means an array
+   new-expression), ITERATOR counts the remaining elements, and OBASE
+   is the array base used for the ubsan bounds instrumentation. */
+
+void
+finish_length_check (tree atype, tree iterator, tree obase, unsigned n)
+{
+  tree nelts = build_int_cst (ptrdiff_type_node, n - 1);
+  if (TREE_CODE (atype) != ARRAY_TYPE)
+    {
+      /* Array new: the element count is a run-time value, so emit a
+	 conditional that calls throw_bad_array_new_length () when
+	 fewer than N elements remain.  */
+      if (flag_exceptions)
+	{
+	  tree c = fold_build2 (LT_EXPR, boolean_type_node, iterator,
+				nelts);
+	  c = build3 (COND_EXPR, void_type_node, c,
+		      throw_bad_array_new_length (), void_node);
+	  finish_expr_stmt (c);
+	}
+      /* Don't check an array new when -fno-exceptions. */
+    }
+  else if (sanitize_flags_p (SANITIZE_BOUNDS)
+	   && current_function_decl != NULL_TREE)
+    {
+      /* Make sure the last element of the initializer is in bounds. */
+      finish_expr_stmt
+	(ubsan_instrument_bounds
+	 (input_location, obase, &nelts, /*ignore_off_by_one*/false));
+    }
+}
+
/* `build_vec_init' returns tree structure that performs
initialization of a vector of aggregate types.
tree obase = base;
bool xvalue = false;
bool errors = false;
+ location_t loc = (init ? EXPR_LOC_OR_LOC (init, input_location)
+ : location_of (base));
if (TREE_CODE (atype) == ARRAY_TYPE && TYPE_DOMAIN (atype))
maxindex = array_type_nelts (atype);
}
}
- /* If we have a braced-init-list, make sure that the array
+ /* If we have a braced-init-list or string constant, make sure that the array
is big enough for all the initializers. */
- bool length_check = (init && TREE_CODE (init) == CONSTRUCTOR
- && CONSTRUCTOR_NELTS (init) > 0
+ bool length_check = (init
+ && (TREE_CODE (init) == STRING_CST
+ || (TREE_CODE (init) == CONSTRUCTOR
+ && CONSTRUCTOR_NELTS (init) > 0))
&& !TREE_CONSTANT (maxindex));
if (init
? vec_copy_assign_is_trivial (inner_elt_type, init)
: !TYPE_NEEDS_CONSTRUCTING (type))
&& ((TREE_CODE (init) == CONSTRUCTOR
+ && (BRACE_ENCLOSED_INITIALIZER_P (init)
+ || (same_type_ignoring_top_level_qualifiers_p
+ (atype, TREE_TYPE (init))))
/* Don't do this if the CONSTRUCTOR might contain something
that might throw and require us to clean up. */
&& (vec_safe_is_empty (CONSTRUCTOR_ELTS (init))
from_array = 0;
if (length_check)
- {
- tree nelts = build_int_cst (ptrdiff_type_node,
- CONSTRUCTOR_NELTS (init) - 1);
- if (TREE_CODE (atype) != ARRAY_TYPE)
- {
- if (flag_exceptions)
- {
- tree c = fold_build2 (LT_EXPR, boolean_type_node, iterator,
- nelts);
- c = build3 (COND_EXPR, void_type_node, c,
- throw_bad_array_new_length (), void_node);
- finish_expr_stmt (c);
- }
- /* Don't check an array new when -fno-exceptions. */
- }
- else if (flag_sanitize & SANITIZE_BOUNDS
- && do_ubsan_in_current_function ())
- {
- /* Make sure the last element of the initializer is in bounds. */
- finish_expr_stmt
- (ubsan_instrument_bounds
- (input_location, obase, &nelts, /*ignore_off_by_one*/false));
- }
- }
+ finish_length_check (atype, iterator, obase, CONSTRUCTOR_NELTS (init));
if (try_const)
vec_alloc (const_vec, CONSTRUCTOR_NELTS (init));
/* Any elements without explicit initializers get T{}. */
empty_list = true;
}
+ else if (init && TREE_CODE (init) == STRING_CST)
+ {
+ /* Check that the array is at least as long as the string. */
+ if (length_check)
+ finish_length_check (atype, iterator, obase,
+ TREE_STRING_LENGTH (init));
+ tree length = build_int_cst (ptrdiff_type_node,
+ TREE_STRING_LENGTH (init));
+
+ /* Copy the string to the first part of the array. */
+ tree alias_set = build_int_cst (build_pointer_type (type), 0);
+ tree lhs = build2 (MEM_REF, TREE_TYPE (init), base, alias_set);
+ tree stmt = build2 (MODIFY_EXPR, void_type_node, lhs, init);
+ finish_expr_stmt (stmt);
+
+ /* Adjust the counter and pointer. */
+ stmt = cp_build_binary_op (loc, MINUS_EXPR, iterator, length, complain);
+ stmt = build2 (MODIFY_EXPR, void_type_node, iterator, stmt);
+ finish_expr_stmt (stmt);
+
+ stmt = cp_build_binary_op (loc, PLUS_EXPR, base, length, complain);
+ stmt = build2 (MODIFY_EXPR, void_type_node, base, stmt);
+ finish_expr_stmt (stmt);
+
+ /* And set the rest of the array to NUL. */
+ from_array = 0;
+ explicit_value_init_p = true;
+ }
else if (from_array)
{
if (init)
finish_init_stmt (for_stmt);
finish_for_cond (build2 (GT_EXPR, boolean_type_node, iterator,
build_int_cst (TREE_TYPE (iterator), -1)),
- for_stmt, false);
+ for_stmt, false, 0);
elt_init = cp_build_unary_op (PREDECREMENT_EXPR, iterator, false,
complain);
if (elt_init == error_mark_node)
if (TREE_CODE (init) == TREE_LIST)
init = build_x_compound_expr_from_list (init, ELK_INIT,
complain);
- elt_init = build2 (INIT_EXPR, type, to, init);
+ elt_init = (init == error_mark_node
+ ? error_mark_node
+ : build2 (INIT_EXPR, type, to, init));
}
}
{
atype = build_pointer_type (atype);
stmt_expr = build1 (NOP_EXPR, atype, stmt_expr);
- stmt_expr = cp_build_indirect_ref (stmt_expr, RO_NULL, complain);
+ stmt_expr = cp_build_fold_indirect_ref (stmt_expr);
TREE_NO_WARNING (stmt_expr) = 1;
}
&& MAYBE_CLASS_TYPE_P (type) && !CLASSTYPE_FINAL (type)
&& TYPE_POLYMORPHIC_P (type))
{
- tree dtor;
- dtor = CLASSTYPE_DESTRUCTORS (type);
+ tree dtor = CLASSTYPE_DESTRUCTOR (type);
if (!dtor || !DECL_VINDEX (dtor))
{
if (CLASSTYPE_PURE_VIRTUALS (type))
/* Make sure the destructor is callable. */
if (type_build_dtor_call (type))
{
- expr = build_dtor_call (cp_build_indirect_ref (addr, RO_NULL,
- complain),
+ expr = build_dtor_call (cp_build_fold_indirect_ref (addr),
sfk_complete_destructor, flags, complain);
if (expr == error_mark_node)
return error_mark_node;
/* If the destructor is non-virtual, there is no deleting
variant. Instead, we must explicitly call the appropriate
`operator delete' here. */
- else if (!DECL_VIRTUAL_P (CLASSTYPE_DESTRUCTORS (type))
+ else if (!DECL_VIRTUAL_P (CLASSTYPE_DESTRUCTOR (type))
&& auto_delete == sfk_deleting_destructor)
{
/* We will use ADDR multiple times so we must save it. */
complain);
}
- expr = build_dtor_call (cp_build_indirect_ref (addr, RO_NULL, complain),
+ expr = build_dtor_call (cp_build_fold_indirect_ref (addr),
auto_delete, flags, complain);
if (expr == error_mark_node)
return error_mark_node;
sizetype, TYPE_SIZE_UNIT (sizetype));
cookie_addr = fold_build_pointer_plus (fold_convert (size_ptr_type, base),
cookie_addr);
- maxindex = cp_build_indirect_ref (cookie_addr, RO_NULL, complain);
+ maxindex = cp_build_fold_indirect_ref (cookie_addr);
}
else if (TREE_CODE (type) == ARRAY_TYPE)
{
return rval;
}
+
+#include "gt-cp-init.h"