{ "__auto_type", RID_AUTO_TYPE, D_CONLY },
{ "__bases", RID_BASES, D_CXXONLY },
{ "__builtin_addressof", RID_ADDRESSOF, D_CXXONLY },
+ { "__builtin_bit_cast", RID_BUILTIN_BIT_CAST, D_CXXONLY },
{ "__builtin_call_with_static_chain",
RID_BUILTIN_CALL_WITH_STATIC_CHAIN, D_CONLY },
{ "__builtin_choose_expr", RID_CHOOSE_EXPR, D_CONLY },
RID_HAS_NOTHROW_COPY, RID_HAS_TRIVIAL_ASSIGN,
RID_HAS_TRIVIAL_CONSTRUCTOR, RID_HAS_TRIVIAL_COPY,
RID_HAS_TRIVIAL_DESTRUCTOR, RID_HAS_UNIQUE_OBJ_REPRESENTATIONS,
- RID_HAS_VIRTUAL_DESTRUCTOR,
+ RID_HAS_VIRTUAL_DESTRUCTOR, RID_BUILTIN_BIT_CAST,
RID_IS_ABSTRACT, RID_IS_AGGREGATE,
RID_IS_BASE_OF, RID_IS_CLASS,
RID_IS_EMPTY, RID_IS_ENUM,
return error_mark_node;
}
+/* Helper for cxx_eval_bit_cast.
+ Implements the C++ [bit.cast]/3 constraints: __builtin_bit_cast is a
+ constant expression only if neither the To nor the From type, nor the
+ type of any of their subobjects, is a union type, a pointer type, a
+ pointer to member type or a volatile-qualified type, and neither has
+ a non-static data member of reference type.
+ TYPE is the type currently being examined; ORIG_TYPE is the outermost
+ To or From type and is used only to choose between the "is" and
+ "contains" wording of the diagnostics. Returns true (after emitting
+ a diagnostic unless ctx->quiet) if TYPE violates the constraints,
+ false if it is acceptable. */
+
+static bool
+check_bit_cast_type (const constexpr_ctx *ctx, location_t loc, tree type,
+ tree orig_type)
+{
+ if (TREE_CODE (type) == UNION_TYPE)
+ {
+ if (!ctx->quiet)
+ {
+ if (type == orig_type)
+ error_at (loc, "%qs is not a constant expression because %qT is "
+ "a union type", "__builtin_bit_cast", type);
+ else
+ error_at (loc, "%qs is not a constant expression because %qT "
+ "contains a union type", "__builtin_bit_cast",
+ orig_type);
+ }
+ return true;
+ }
+ if (TREE_CODE (type) == POINTER_TYPE)
+ {
+ if (!ctx->quiet)
+ {
+ if (type == orig_type)
+ error_at (loc, "%qs is not a constant expression because %qT is "
+ "a pointer type", "__builtin_bit_cast", type);
+ else
+ error_at (loc, "%qs is not a constant expression because %qT "
+ "contains a pointer type", "__builtin_bit_cast",
+ orig_type);
+ }
+ return true;
+ }
+ if (TREE_CODE (type) == REFERENCE_TYPE)
+ {
+ if (!ctx->quiet)
+ {
+ if (type == orig_type)
+ error_at (loc, "%qs is not a constant expression because %qT is "
+ "a reference type", "__builtin_bit_cast", type);
+ else
+ error_at (loc, "%qs is not a constant expression because %qT "
+ "contains a reference type", "__builtin_bit_cast",
+ orig_type);
+ }
+ return true;
+ }
+ if (TYPE_PTRMEM_P (type))
+ {
+ if (!ctx->quiet)
+ {
+ if (type == orig_type)
+ error_at (loc, "%qs is not a constant expression because %qT is "
+ "a pointer to member type", "__builtin_bit_cast",
+ type);
+ else
+ error_at (loc, "%qs is not a constant expression because %qT "
+ "contains a pointer to member type",
+ "__builtin_bit_cast", orig_type);
+ }
+ return true;
+ }
+ if (TYPE_VOLATILE (type))
+ {
+ if (!ctx->quiet)
+ {
+ if (type == orig_type)
+ error_at (loc, "%qs is not a constant expression because %qT is "
+ "volatile", "__builtin_bit_cast", type);
+ else
+ error_at (loc, "%qs is not a constant expression because %qT "
+ "contains a volatile subobject",
+ "__builtin_bit_cast", orig_type);
+ }
+ return true;
+ }
+ /* For class types recurse into every non-static data member; reference
+ members are caught by the REFERENCE_TYPE check above. */
+ if (TREE_CODE (type) == RECORD_TYPE)
+ for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
+ if (TREE_CODE (field) == FIELD_DECL
+ && check_bit_cast_type (ctx, loc, TREE_TYPE (field), orig_type))
+ return true;
+ return false;
+}
+
+/* Subroutine of cxx_eval_constant_expression.
+ Attempt to evaluate a BIT_CAST_EXPR T: encode the constant-evaluated
+ operand into a byte buffer and reinterpret those bytes as
+ TREE_TYPE (T). On success returns the reinterpreted constant; on
+ failure sets *NON_CONSTANT_P (diagnosing unless ctx->quiet) and
+ returns T. *OVERFLOW_P is propagated from evaluating the operand. */
+
+static tree
+cxx_eval_bit_cast (const constexpr_ctx *ctx, tree t, bool *non_constant_p,
+ bool *overflow_p)
+{
+ /* Reject To/From types disallowed by [bit.cast]/3. */
+ if (check_bit_cast_type (ctx, EXPR_LOCATION (t), TREE_TYPE (t),
+ TREE_TYPE (t))
+ || check_bit_cast_type (ctx, cp_expr_loc_or_loc (TREE_OPERAND (t, 0),
+ EXPR_LOCATION (t)),
+ TREE_TYPE (TREE_OPERAND (t, 0)),
+ TREE_TYPE (TREE_OPERAND (t, 0))))
+ {
+ *non_constant_p = true;
+ return t;
+ }
+
+ tree op = cxx_eval_constant_expression (ctx, TREE_OPERAND (t, 0), false,
+ non_constant_p, overflow_p);
+ if (*non_constant_p)
+ return t;
+
+ location_t loc = EXPR_LOCATION (t);
+ /* The byte-level encoding below assumes 8-bit bytes on both the host
+ and the target. */
+ if (BITS_PER_UNIT != 8 || CHAR_BIT != 8)
+ {
+ if (!ctx->quiet)
+ sorry_at (loc, "%qs cannot be constant evaluated on the target",
+ "__builtin_bit_cast");
+ *non_constant_p = true;
+ return t;
+ }
+
+ if (!tree_fits_shwi_p (TYPE_SIZE_UNIT (TREE_TYPE (t))))
+ {
+ if (!ctx->quiet)
+ sorry_at (loc, "%qs cannot be constant evaluated because the "
+ "type is too large", "__builtin_bit_cast");
+ *non_constant_p = true;
+ return t;
+ }
+
+ /* native_encode_initializer and native_interpret_* take int lengths,
+ so the size must also fit into an int. */
+ HOST_WIDE_INT len = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (t)));
+ if (len < 0 || (int) len != len)
+ {
+ if (!ctx->quiet)
+ sorry_at (loc, "%qs cannot be constant evaluated because the "
+ "type is too large", "__builtin_bit_cast");
+ *non_constant_p = true;
+ return t;
+ }
+
+ /* One allocation of 2 * LEN bytes: the first LEN bytes (PTR) receive
+ the encoded value, the second LEN bytes (MASK) track which bits are
+ indeterminate. Small sizes use the stack buffer. */
+ unsigned char buf[64];
+ unsigned char *ptr, *mask;
+ size_t alen = (size_t) len * 2;
+ if (alen <= sizeof (buf))
+ ptr = buf;
+ else
+ ptr = XNEWVEC (unsigned char, alen);
+ mask = ptr + (size_t) len;
+ /* At the beginning consider everything indeterminate. */
+ memset (mask, ~0, (size_t) len);
+
+ if (native_encode_initializer (op, ptr, len, 0, mask) != len)
+ {
+ if (!ctx->quiet)
+ sorry_at (loc, "%qs cannot be constant evaluated because the "
+ "argument cannot be encoded", "__builtin_bit_cast");
+ *non_constant_p = true;
+ if (ptr != buf)
+ XDELETE (ptr);
+ return t;
+ }
+
+ tree r = NULL_TREE;
+ if (can_native_interpret_type_p (TREE_TYPE (t)))
+ r = native_interpret_expr (TREE_TYPE (t), ptr, len);
+ else if (TREE_CODE (TREE_TYPE (t)) == RECORD_TYPE)
+ {
+ r = native_interpret_aggregate (TREE_TYPE (t), ptr, 0, len);
+ if (r != NULL_TREE)
+ /* Padding bits in the destination type may legitimately stay
+ indeterminate; don't diagnose those below. */
+ clear_type_padding_in_mask (TREE_TYPE (t), mask);
+ }
+
+ if (r != NULL_TREE)
+ {
+ /* Diagnose any byte whose value bits are still indeterminate. */
+ for (int i = 0; i < len; i++)
+ if (mask[i])
+ {
+ if (!ctx->quiet)
+ error_at (loc, "%qs accessing uninitialized byte at offset %d",
+ "__builtin_bit_cast", i);
+ *non_constant_p = true;
+ r = t;
+ break;
+ }
+ if (ptr != buf)
+ XDELETE (ptr);
+ return r;
+ }
+
+ if (!ctx->quiet)
+ sorry_at (loc, "%qs cannot be constant evaluated because the "
+ "argument cannot be interpreted", "__builtin_bit_cast");
+ *non_constant_p = true;
+ if (ptr != buf)
+ XDELETE (ptr);
+ return t;
+}
+
/* Subroutine of cxx_eval_constant_expression.
Evaluate a short-circuited logical expression T in the context
of a given constexpr CALL. BAILOUT_VALUE is the value for
*non_constant_p = true;
return t;
+ case BIT_CAST_EXPR:
+ r = cxx_eval_bit_cast (ctx, t, non_constant_p, overflow_p);
+ break;
+
default:
if (STATEMENT_CODE_P (TREE_CODE (t)))
{
case ANNOTATE_EXPR:
return RECUR (TREE_OPERAND (t, 0), rval);
+ case BIT_CAST_EXPR:
+ return RECUR (TREE_OPERAND (t, 0), rval);
+
/* Coroutine await, yield and return expressions are not. */
case CO_AWAIT_EXPR:
case CO_YIELD_EXPR:
cp_genericize_r, cp_walk_subtrees);
break;
+ case BIT_CAST_EXPR:
+ *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
+ TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
+ break;
+
default:
if (IS_TYPE_OR_DECL_P (stmt))
*walk_subtrees = 0;
case RID_BUILTIN_HAS_ATTRIBUTE:
case RID_BUILTIN_SHUFFLE:
case RID_BUILTIN_LAUNDER:
+ case RID_BUILTIN_BIT_CAST:
case RID_OFFSETOF:
case RID_HAS_NOTHROW_ASSIGN:
case RID_HAS_NOTHROW_CONSTRUCTOR:
MARK_TS_EXP (ALIGNOF_EXPR);
MARK_TS_EXP (ARROW_EXPR);
MARK_TS_EXP (AT_ENCODE_EXPR);
+ MARK_TS_EXP (BIT_CAST_EXPR);
MARK_TS_EXP (CAST_EXPR);
MARK_TS_EXP (CONST_CAST_EXPR);
MARK_TS_EXP (CTOR_INITIALIZER);
DEFTREECODE (BINARY_LEFT_FOLD_EXPR, "binary_left_fold_expr", tcc_expression, 3)
DEFTREECODE (BINARY_RIGHT_FOLD_EXPR, "binary_right_fold_expr", tcc_expression, 3)
+/* Represents the __builtin_bit_cast (type, expr) expression.
+ The type is in TREE_TYPE, expression in TREE_OPERAND (bitcast, 0). */
+DEFTREECODE (BIT_CAST_EXPR, "bit_cast_expr", tcc_expression, 1)
/** C++ extensions. */
tsubst_flags_t);
extern tree cp_build_vec_convert (tree, location_t, tree,
tsubst_flags_t);
+extern tree cp_build_bit_cast (location_t, tree, tree,
+ tsubst_flags_t);
extern void start_lambda_scope (tree);
extern void record_lambda_scope (tree);
extern void record_null_lambda_scope (tree);
pp_right_paren (this);
break;
+ case BIT_CAST_EXPR:
+ pp_cxx_ws_string (this, "__builtin_bit_cast");
+ pp_left_paren (this);
+ type_id (TREE_TYPE (t));
+ pp_comma (this);
+ expression (TREE_OPERAND (t, 0));
+ pp_right_paren (this);
+ break;
+
case EMPTY_CLASS_EXPR:
type_id (TREE_TYPE (t));
pp_left_paren (this);
tf_warning_or_error);
}
+ case RID_BUILTIN_BIT_CAST:
+ {
+ tree expression;
+ tree type;
+ /* Consume the `__builtin_bit_cast' token. */
+ cp_lexer_consume_token (parser->lexer);
+ /* Look for the opening `('. */
+ matching_parens parens;
+ parens.require_open (parser);
+ location_t type_location
+ = cp_lexer_peek_token (parser->lexer)->location;
+ /* Parse the type-id. */
+ {
+ type_id_in_expr_sentinel s (parser);
+ type = cp_parser_type_id (parser);
+ }
+ /* Look for the `,'. */
+ cp_parser_require (parser, CPP_COMMA, RT_COMMA);
+ /* Now, parse the assignment-expression. */
+ expression = cp_parser_assignment_expression (parser);
+ /* Look for the closing `)'. */
+ parens.require_close (parser);
+ return cp_build_bit_cast (type_location, type, expression,
+ tf_warning_or_error);
+ }
+
default:
{
tree type;
return build1 (code, type, op0);
}
+ case BIT_CAST_EXPR:
+ {
+ tree type = tsubst (TREE_TYPE (t), args, complain, in_decl);
+ tree op0 = tsubst_copy (TREE_OPERAND (t, 0), args, complain, in_decl);
+ return cp_build_bit_cast (EXPR_LOCATION (t), type, op0, complain);
+ }
+
case SIZEOF_EXPR:
if (PACK_EXPANSION_P (TREE_OPERAND (t, 0))
|| ARGUMENT_PACK_P (TREE_OPERAND (t, 0)))
return build_call_expr_internal_loc (loc, IFN_VEC_CONVERT, type, 1, arg);
}
+/* Finish __builtin_bit_cast (TYPE, ARG) at LOC: diagnose the
+ non-dependent requirements (complete, non-array, trivially copyable
+ destination; trivially copyable source with the same size) and build
+ a BIT_CAST_EXPR, or return error_mark_node on failure. COMPLAIN is
+ passed to completion and conversion helpers; dependent types and
+ expressions defer checking to template substitution. */
+
+tree
+cp_build_bit_cast (location_t loc, tree type, tree arg,
+ tsubst_flags_t complain)
+{
+ if (error_operand_p (type))
+ return error_mark_node;
+ if (!dependent_type_p (type))
+ {
+ if (!complete_type_or_maybe_complain (type, NULL_TREE, complain))
+ return error_mark_node;
+ if (TREE_CODE (type) == ARRAY_TYPE)
+ {
+ /* std::bit_cast for destination ARRAY_TYPE is not possible,
+ as functions may not return an array, so don't bother trying
+ to support this (and then deal with VLAs etc.). */
+ error_at (loc, "%<__builtin_bit_cast%> destination type %qT "
+ "is an array type", type);
+ return error_mark_node;
+ }
+ if (!trivially_copyable_p (type))
+ {
+ error_at (loc, "%<__builtin_bit_cast%> destination type %qT "
+ "is not trivially copyable", type);
+ return error_mark_node;
+ }
+ }
+
+ if (error_operand_p (arg))
+ return error_mark_node;
+
+ if (!type_dependent_expression_p (arg))
+ {
+ if (TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE)
+ {
+ /* Don't perform array-to-pointer conversion. */
+ arg = mark_rvalue_use (arg, loc, true);
+ if (!complete_type_or_maybe_complain (TREE_TYPE (arg), arg, complain))
+ return error_mark_node;
+ }
+ else
+ arg = decay_conversion (arg, complain);
+
+ if (error_operand_p (arg))
+ return error_mark_node;
+
+ if (!trivially_copyable_p (TREE_TYPE (arg)))
+ {
+ error_at (cp_expr_loc_or_loc (arg, loc),
+ "%<__builtin_bit_cast%> source type %qT "
+ "is not trivially copyable", TREE_TYPE (arg));
+ return error_mark_node;
+ }
+ /* The size comparison needs both types; skip it if the destination
+ is still dependent. */
+ if (!dependent_type_p (type)
+ && !cp_tree_equal (TYPE_SIZE_UNIT (type),
+ TYPE_SIZE_UNIT (TREE_TYPE (arg))))
+ {
+ error_at (loc, "%<__builtin_bit_cast%> source size %qE "
+ "not equal to destination type size %qE",
+ TYPE_SIZE_UNIT (TREE_TYPE (arg)),
+ TYPE_SIZE_UNIT (type));
+ return error_mark_node;
+ }
+ }
+
+ tree ret = build_min (BIT_CAST_EXPR, type, arg);
+ SET_EXPR_LOCATION (ret, loc);
+ return ret;
+}
+
#include "gt-cp-semantics.h"
CASE_CONVERT:
case NON_LVALUE_EXPR:
case VIEW_CONVERT_EXPR:
+ case BIT_CAST_EXPR:
if (!same_type_p (TREE_TYPE (t1), TREE_TYPE (t2)))
return false;
/* Now compare operands as usual. */
case CONST_CAST_EXPR:
case DYNAMIC_CAST_EXPR:
case IMPLICIT_CONV_EXPR:
+ case BIT_CAST_EXPR:
if (TREE_TYPE (*tp))
WALK_SUBTREE (TREE_TYPE (*tp));
bitwise compared to some other object, for example for atomic operations.
@end deftypefn
+@deftypefn {Built-in Function} @var{type} __builtin_bit_cast (@var{type}, @var{arg})
+The @code{__builtin_bit_cast} function is available only
+in C++. The built-in is intended to be used by implementations of
+the @code{std::bit_cast} C++ template function. Programs should make
+use of the latter function rather than invoking the built-in directly.
+
+This built-in function allows reinterpreting the bits of the @var{arg}
+argument as if it had type @var{type}. @var{type} and the type of the
+@var{arg} argument need to be trivially copyable types with the same size.
+When manifestly constant-evaluated, it performs extra diagnostics required
+for @code{std::bit_cast} and returns a constant expression if @var{arg}
+is a constant expression. For more details
+refer to the latest revision of the C++ standard.
+@end deftypefn
+
@deftypefn {Built-in Function} long __builtin_expect (long @var{exp}, long @var{c})
@opindex fprofile-arcs
You may use @code{__builtin_expect} to provide the compiler with
}
}
+/* Try to find an integral type whose byte size is greater than or equal
+ to FIELDSIZE bytes and smaller than or equal to LEN bytes, with
+ underlying mode precision equal to the mode bitsize (i.e. a multiple
+ of BITS_PER_UNIT). As native_{interpret,encode}_int work in terms of
+ machine modes, we can't just use build_nonstandard_integer_type.
+ Returns NULL_TREE when no suitable type exists. */
+
+tree
+find_bitfield_repr_type (int fieldsize, int len)
+{
+ machine_mode mode;
+ /* First try ordinary integer modes, then partial integer modes. */
+ for (int pass = 0; pass < 2; pass++)
+ {
+ enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
+ FOR_EACH_MODE_IN_CLASS (mode, mclass)
+ if (known_ge (GET_MODE_SIZE (mode), fieldsize)
+ && known_eq (GET_MODE_PRECISION (mode),
+ GET_MODE_BITSIZE (mode))
+ && known_le (GET_MODE_SIZE (mode), len))
+ {
+ tree ret = lang_hooks.types.type_for_mode (mode, 1);
+ if (ret && TYPE_MODE (ret) == mode)
+ return ret;
+ }
+ }
+
+ /* Fall back to the target's enabled extended __intN types, applying
+ the same size/precision constraints. */
+ for (int i = 0; i < NUM_INT_N_ENTS; i ++)
+ if (int_n_enabled_p[i]
+ && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
+ && int_n_trees[i].unsigned_type)
+ {
+ tree ret = int_n_trees[i].unsigned_type;
+ mode = TYPE_MODE (ret);
+ if (known_ge (GET_MODE_SIZE (mode), fieldsize)
+ && known_eq (GET_MODE_PRECISION (mode),
+ GET_MODE_BITSIZE (mode))
+ && known_le (GET_MODE_SIZE (mode), len))
+ return ret;
+ }
+
+ return NULL_TREE;
+}
+
/* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
- NON_LVALUE_EXPRs and nops. */
+ NON_LVALUE_EXPRs and nops. If MASK is non-NULL (then PTR has
+ to be non-NULL and OFF zero), then in addition to filling the
+ bytes pointed by PTR with the value also clear any bits pointed
+ by MASK that are known to be initialized, keep them as is for
+ e.g. uninitialized padding bits or uninitialized fields. */
int
native_encode_initializer (tree init, unsigned char *ptr, int len,
- int off)
+ int off, unsigned char *mask)
{
+ int r;
+
/* We don't support starting at negative offset and -1 is special. */
if (off < -1 || init == NULL_TREE)
return 0;
+ gcc_assert (mask == NULL || (off == 0 && ptr));
+
STRIP_NOPS (init);
switch (TREE_CODE (init))
{
case VIEW_CONVERT_EXPR:
case NON_LVALUE_EXPR:
- return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off);
+ return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
+ mask);
default:
- return native_encode_expr (init, ptr, len, off);
+ r = native_encode_expr (init, ptr, len, off);
+ if (mask)
+ memset (mask, 0, r);
+ return r;
case CONSTRUCTOR:
tree type = TREE_TYPE (init);
HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
{
HOST_WIDE_INT min_index;
unsigned HOST_WIDE_INT cnt;
- HOST_WIDE_INT curpos = 0, fieldsize;
+ HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
constructor_elt *ce;
if (TYPE_DOMAIN (type) == NULL_TREE
if (ptr != NULL)
memset (ptr, '\0', MIN (total_bytes - off, len));
- FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
+ for (cnt = 0; ; cnt++)
{
- tree val = ce->value;
- tree index = ce->index;
+ tree val = NULL_TREE, index = NULL_TREE;
HOST_WIDE_INT pos = curpos, count = 0;
bool full = false;
+ if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
+ {
+ val = ce->value;
+ index = ce->index;
+ }
+ else if (mask == NULL
+ || CONSTRUCTOR_NO_CLEARING (init)
+ || curpos >= total_bytes)
+ break;
+ else
+ pos = total_bytes;
if (index && TREE_CODE (index) == RANGE_EXPR)
{
if (!tree_fits_shwi_p (TREE_OPERAND (index, 0))
pos = (tree_to_shwi (index) - min_index) * fieldsize;
}
+ if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
+ {
+ if (valueinit == -1)
+ {
+ tree zero = build_constructor (TREE_TYPE (type), NULL);
+ r = native_encode_initializer (zero, ptr + curpos,
+ fieldsize, 0,
+ mask + curpos);
+ ggc_free (zero);
+ if (!r)
+ return 0;
+ valueinit = curpos;
+ curpos += fieldsize;
+ }
+ while (curpos != pos)
+ {
+ memcpy (ptr + curpos, ptr + valueinit, fieldsize);
+ memcpy (mask + curpos, mask + valueinit, fieldsize);
+ curpos += fieldsize;
+ }
+ }
+
curpos = pos;
if (val)
do
if (ptr)
memcpy (ptr + (curpos - o), ptr + (pos - o),
fieldsize);
+ if (mask)
+ memcpy (mask + curpos, mask + pos, fieldsize);
}
else if (!native_encode_initializer (val,
ptr
: NULL,
fieldsize,
off == -1 ? -1
- : 0))
+ : 0,
+ mask
+ ? mask + curpos
+ : NULL))
return 0;
else
{
unsigned char *p = NULL;
int no = 0;
int l;
+ gcc_assert (mask == NULL);
if (curpos >= off)
{
if (ptr)
no = off - curpos;
l = len;
}
- if (!native_encode_initializer (val, p, l, no))
+ if (!native_encode_initializer (val, p, l, no, NULL))
return 0;
}
curpos += fieldsize;
{
unsigned HOST_WIDE_INT cnt;
constructor_elt *ce;
+ tree fld_base = TYPE_FIELDS (type);
+ tree to_free = NULL_TREE;
+ gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
if (ptr != NULL)
memset (ptr, '\0', MIN (total_bytes - off, len));
- FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
+ for (cnt = 0; ; cnt++)
{
- tree field = ce->index;
- tree val = ce->value;
- HOST_WIDE_INT pos, fieldsize;
+ tree val = NULL_TREE, field = NULL_TREE;
+ HOST_WIDE_INT pos = 0, fieldsize;
unsigned HOST_WIDE_INT bpos = 0, epos = 0;
- if (field == NULL_TREE)
- return 0;
+ if (to_free)
+ {
+ ggc_free (to_free);
+ to_free = NULL_TREE;
+ }
- pos = int_byte_position (field);
- if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
- continue;
+ if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
+ {
+ val = ce->value;
+ field = ce->index;
+ if (field == NULL_TREE)
+ return 0;
+
+ pos = int_byte_position (field);
+ if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
+ continue;
+ }
+ else if (mask == NULL
+ || CONSTRUCTOR_NO_CLEARING (init))
+ break;
+ else
+ pos = total_bytes;
+
+ if (mask && !CONSTRUCTOR_NO_CLEARING (init))
+ {
+ tree fld;
+ for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
+ {
+ if (TREE_CODE (fld) != FIELD_DECL)
+ continue;
+ if (fld == field)
+ break;
+ if (DECL_PADDING_P (fld))
+ continue;
+ if (DECL_SIZE_UNIT (fld) == NULL_TREE
+ || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
+ return 0;
+ if (integer_zerop (DECL_SIZE_UNIT (fld)))
+ continue;
+ break;
+ }
+ if (fld == NULL_TREE)
+ {
+ if (ce == NULL)
+ break;
+ return 0;
+ }
+ fld_base = DECL_CHAIN (fld);
+ if (fld != field)
+ {
+ cnt--;
+ field = fld;
+ val = build_constructor (TREE_TYPE (fld), NULL);
+ to_free = val;
+ }
+ }
if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
&& TYPE_DOMAIN (TREE_TYPE (field))
if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
return 0;
- tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
- if (repr == NULL_TREE
- || TREE_CODE (val) != INTEGER_CST
- || !INTEGRAL_TYPE_P (TREE_TYPE (repr)))
+ if (TREE_CODE (val) != INTEGER_CST)
return 0;
- HOST_WIDE_INT rpos = int_byte_position (repr);
+ tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
+ tree repr_type = NULL_TREE;
+ HOST_WIDE_INT rpos = 0;
+ if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
+ {
+ rpos = int_byte_position (repr);
+ repr_type = TREE_TYPE (repr);
+ }
+ else
+ {
+ repr_type = find_bitfield_repr_type (fieldsize, len);
+ if (repr_type == NULL_TREE)
+ return 0;
+ HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
+ gcc_assert (repr_size > 0 && repr_size <= len);
+ if (pos + repr_size <= len)
+ rpos = pos;
+ else
+ {
+ rpos = len - repr_size;
+ gcc_assert (rpos <= pos);
+ }
+ }
+
if (rpos > pos)
return 0;
- wide_int w = wi::to_wide (val,
- TYPE_PRECISION (TREE_TYPE (repr)));
- int diff = (TYPE_PRECISION (TREE_TYPE (repr))
+ wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
+ int diff = (TYPE_PRECISION (repr_type)
- TYPE_PRECISION (TREE_TYPE (field)));
HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
if (!BYTES_BIG_ENDIAN)
w = wi::lshift (w, bitoff);
else
w = wi::lshift (w, diff - bitoff);
- val = wide_int_to_tree (TREE_TYPE (repr), w);
+ val = wide_int_to_tree (repr_type, w);
unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
/ BITS_PER_UNIT + 1];
int l = native_encode_int (val, buf, sizeof buf, 0);
- if (l * BITS_PER_UNIT != TYPE_PRECISION (TREE_TYPE (repr)))
+ if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
return 0;
if (ptr == NULL)
{
if (!BYTES_BIG_ENDIAN)
{
- int mask = (1 << bpos) - 1;
- buf[pos - rpos] &= ~mask;
- buf[pos - rpos] |= ptr[pos - o] & mask;
+ int msk = (1 << bpos) - 1;
+ buf[pos - rpos] &= ~msk;
+ buf[pos - rpos] |= ptr[pos - o] & msk;
+ if (mask)
+ {
+ if (fieldsize > 1 || epos == 0)
+ mask[pos] &= msk;
+ else
+ mask[pos] &= (msk | ~((1 << epos) - 1));
+ }
}
else
{
- int mask = (1 << (BITS_PER_UNIT - bpos)) - 1;
- buf[pos - rpos] &= mask;
- buf[pos - rpos] |= ptr[pos - o] & ~mask;
+ int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
+ buf[pos - rpos] &= msk;
+ buf[pos - rpos] |= ptr[pos - o] & ~msk;
+ if (mask)
+ {
+ if (fieldsize > 1 || epos == 0)
+ mask[pos] &= ~msk;
+ else
+ mask[pos] &= (~msk
+ | ((1 << (BITS_PER_UNIT - epos))
+ - 1));
+ }
}
}
/* If the bitfield does not end at byte boundary, handle
{
if (!BYTES_BIG_ENDIAN)
{
- int mask = (1 << epos) - 1;
- buf[pos - rpos + fieldsize - 1] &= mask;
+ int msk = (1 << epos) - 1;
+ buf[pos - rpos + fieldsize - 1] &= msk;
buf[pos - rpos + fieldsize - 1]
- |= ptr[pos + fieldsize - 1 - o] & ~mask;
+ |= ptr[pos + fieldsize - 1 - o] & ~msk;
+ if (mask && (fieldsize > 1 || bpos == 0))
+ mask[pos + fieldsize - 1] &= ~msk;
}
else
{
- int mask = (1 << (BITS_PER_UNIT - epos)) - 1;
- buf[pos - rpos + fieldsize - 1] &= ~mask;
+ int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
+ buf[pos - rpos + fieldsize - 1] &= ~msk;
buf[pos - rpos + fieldsize - 1]
- |= ptr[pos + fieldsize - 1 - o] & mask;
+ |= ptr[pos + fieldsize - 1 - o] & msk;
+ if (mask && (fieldsize > 1 || bpos == 0))
+ mask[pos + fieldsize - 1] &= msk;
}
}
if (off == -1
|| (pos >= off
&& (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
- memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
+ {
+ memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
+ if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
+ memset (mask + pos + (bpos != 0), 0,
+ fieldsize - (bpos != 0) - (epos != 0));
+ }
else
{
/* Partial overlap. */
HOST_WIDE_INT fsz = fieldsize;
+ gcc_assert (mask == NULL);
if (pos < off)
{
fsz -= (off - pos);
if (!native_encode_initializer (val, ptr ? ptr + pos - o
: NULL,
fieldsize,
- off == -1 ? -1 : 0))
+ off == -1 ? -1 : 0,
+ mask ? mask + pos : NULL))
return 0;
}
else
unsigned char *p = NULL;
int no = 0;
int l;
+ gcc_assert (mask == NULL);
if (pos >= off)
{
if (ptr)
no = off - pos;
l = len;
}
- if (!native_encode_initializer (val, p, l, no))
+ if (!native_encode_initializer (val, p, l, no, NULL))
return 0;
}
}
}
}
+/* Attempt to interpret aggregate of TYPE from bytes encoded in target
+ byte order at PTR + OFF with LEN bytes. Does not handle unions.
+ Builds and returns a CONSTRUCTOR for TYPE on success, NULL_TREE on
+ any failure (unsupported type, out-of-range sizes/positions, or an
+ element that cannot be interpreted). */
+
+tree
+native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
+ int len)
+{
+ vec<constructor_elt, va_gc> *elts = NULL;
+ if (TREE_CODE (type) == ARRAY_TYPE)
+ {
+ HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
+ if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
+ return NULL_TREE;
+
+ HOST_WIDE_INT cnt = 0;
+ if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
+ {
+ if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
+ return NULL_TREE;
+ cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
+ }
+ if (eltsz == 0)
+ cnt = 0;
+ /* Interpret each element in turn, recursing for nested
+ aggregates. */
+ HOST_WIDE_INT pos = 0;
+ for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
+ {
+ tree v = NULL_TREE;
+ if (pos >= len || pos + eltsz > len)
+ return NULL_TREE;
+ if (can_native_interpret_type_p (TREE_TYPE (type)))
+ {
+ v = native_interpret_expr (TREE_TYPE (type),
+ ptr + off + pos, eltsz);
+ if (v == NULL_TREE)
+ return NULL_TREE;
+ }
+ else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
+ || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
+ v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
+ eltsz);
+ if (v == NULL_TREE)
+ return NULL_TREE;
+ CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
+ }
+ return build_constructor (type, elts);
+ }
+ if (TREE_CODE (type) != RECORD_TYPE)
+ return NULL_TREE;
+ for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
+ {
+ if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field))
+ continue;
+ /* FLD is the decl whose bytes are read: the field itself, its
+ bit-field representative, or NULL_TREE when an artificial
+ representative type has already been interpreted into V. */
+ tree fld = field;
+ HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
+ int diff = 0;
+ tree v = NULL_TREE;
+ if (DECL_BIT_FIELD (field))
+ {
+ fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
+ if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
+ {
+ /* Use the representative; compute the bit offset of the
+ field within it. */
+ poly_int64 bitoffset;
+ poly_uint64 field_offset, fld_offset;
+ if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
+ && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
+ bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
+ else
+ bitoffset = 0;
+ bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
+ - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
+ diff = (TYPE_PRECISION (TREE_TYPE (fld))
+ - TYPE_PRECISION (TREE_TYPE (field)));
+ if (!bitoffset.is_constant (&bitoff)
+ || bitoff < 0
+ || bitoff > diff)
+ return NULL_TREE;
+ }
+ else
+ {
+ /* No usable representative; find an artificial integer
+ type covering the bit-field and interpret that. */
+ if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
+ return NULL_TREE;
+ int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
+ int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
+ bpos %= BITS_PER_UNIT;
+ fieldsize += bpos;
+ fieldsize += BITS_PER_UNIT - 1;
+ fieldsize /= BITS_PER_UNIT;
+ tree repr_type = find_bitfield_repr_type (fieldsize, len);
+ if (repr_type == NULL_TREE)
+ return NULL_TREE;
+ sz = int_size_in_bytes (repr_type);
+ if (sz < 0 || sz > len)
+ return NULL_TREE;
+ pos = int_byte_position (field);
+ if (pos < 0 || pos > len || pos + fieldsize > len)
+ return NULL_TREE;
+ HOST_WIDE_INT rpos;
+ if (pos + sz <= len)
+ rpos = pos;
+ else
+ {
+ /* Shift the window left so it stays within LEN. */
+ rpos = len - sz;
+ gcc_assert (rpos <= pos);
+ }
+ bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
+ pos = rpos;
+ diff = (TYPE_PRECISION (repr_type)
+ - TYPE_PRECISION (TREE_TYPE (field)));
+ v = native_interpret_expr (repr_type, ptr + off + pos, sz);
+ if (v == NULL_TREE)
+ return NULL_TREE;
+ fld = NULL_TREE;
+ }
+ }
+
+ if (fld)
+ {
+ sz = int_size_in_bytes (TREE_TYPE (fld));
+ if (sz < 0 || sz > len)
+ return NULL_TREE;
+ tree byte_pos = byte_position (fld);
+ if (!tree_fits_shwi_p (byte_pos))
+ return NULL_TREE;
+ pos = tree_to_shwi (byte_pos);
+ if (pos < 0 || pos > len || pos + sz > len)
+ return NULL_TREE;
+ }
+ if (fld == NULL_TREE)
+ /* Already handled above. */;
+ else if (can_native_interpret_type_p (TREE_TYPE (fld)))
+ {
+ v = native_interpret_expr (TREE_TYPE (fld),
+ ptr + off + pos, sz);
+ if (v == NULL_TREE)
+ return NULL_TREE;
+ }
+ else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
+ || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
+ v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
+ if (v == NULL_TREE)
+ return NULL_TREE;
+ if (fld != field)
+ {
+ /* V holds the whole representative; shift out the bits of
+ the bit-field itself. */
+ if (TREE_CODE (v) != INTEGER_CST)
+ return NULL_TREE;
+
+ /* FIXME: Figure out how to handle PDP endian bitfields. */
+ if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
+ return NULL_TREE;
+ if (!BYTES_BIG_ENDIAN)
+ v = wide_int_to_tree (TREE_TYPE (field),
+ wi::lrshift (wi::to_wide (v), bitoff));
+ else
+ v = wide_int_to_tree (TREE_TYPE (field),
+ wi::lrshift (wi::to_wide (v),
+ diff - bitoff));
+ }
+ CONSTRUCTOR_APPEND_ELT (elts, field, v);
+ }
+ return build_constructor (type, elts);
+}
+
/* Routines for manipulation of native_encode_expr encoded data if the encoded
or extracted constant positions and/or sizes aren't byte aligned. */
/* Convert between trees and native memory representation. */
extern int native_encode_expr (const_tree, unsigned char *, int, int off = -1);
extern int native_encode_initializer (tree, unsigned char *, int,
- int off = -1);
+ int off = -1, unsigned char * = nullptr);
extern tree native_interpret_expr (tree, const unsigned char *, int);
extern bool can_native_interpret_type_p (tree);
+extern tree native_interpret_aggregate (tree, const unsigned char *, int, int);
+extern tree find_bitfield_repr_type (int, int);
extern void shift_bytes_in_array_left (unsigned char *, unsigned int,
unsigned int);
extern void shift_bytes_in_array_right (unsigned char *, unsigned int,
/* Data passed through __builtin_clear_padding folding. */
struct clear_padding_struct {
location_t loc;
+ /* 0 during __builtin_clear_padding folding, nonzero during
+ clear_type_padding_in_mask. In that case, instead of clearing the
+ non-padding bits in union_ptr array clear the padding bits in there. */
+ bool clear_in_mask;
tree base;
tree alias_type;
gimple_stmt_iterator *gsi;
size_t padding_bytes = buf->padding_bytes;
if (buf->union_ptr)
{
+ if (buf->clear_in_mask)
+ {
+ /* During clear_type_padding_in_mask, clear the padding
+ bits set in buf->buf in the buf->union_ptr mask. */
+ for (size_t i = 0; i < end; i++)
+ {
+ if (buf->buf[i] == (unsigned char) ~0)
+ padding_bytes++;
+ else
+ {
+ memset (&buf->union_ptr[buf->off + i - padding_bytes],
+ 0, padding_bytes);
+ padding_bytes = 0;
+ buf->union_ptr[buf->off + i] &= ~buf->buf[i];
+ }
+ }
+ if (full)
+ {
+ memset (&buf->union_ptr[buf->off + end - padding_bytes],
+ 0, padding_bytes);
+ buf->off = 0;
+ buf->size = 0;
+ buf->padding_bytes = 0;
+ }
+ else
+ {
+ memmove (buf->buf, buf->buf + end, buf->size - end);
+ buf->off += end;
+ buf->size -= end;
+ buf->padding_bytes = padding_bytes;
+ }
+ return;
+ }
/* Inside of a union, instead of emitting any code, instead
clear all bits in the union_ptr buffer that are clear
in buf. Whole padding bytes don't clear anything. */
clear_padding_flush (buf, false);
union_buf = XALLOCA (clear_padding_struct);
union_buf->loc = buf->loc;
+ union_buf->clear_in_mask = buf->clear_in_mask;
union_buf->base = NULL_TREE;
union_buf->alias_type = NULL_TREE;
union_buf->gsi = NULL;
continue;
gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
&& !COMPLETE_TYPE_P (TREE_TYPE (field)));
- error_at (buf->loc, "flexible array member %qD does not have "
- "well defined padding bits for %qs",
- field, "__builtin_clear_padding");
+ if (!buf->clear_in_mask)
+ error_at (buf->loc, "flexible array member %qD does not have "
+ "well defined padding bits for %qs",
+ field, "__builtin_clear_padding");
continue;
}
HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
continue;
gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
&& !COMPLETE_TYPE_P (ftype));
- error_at (buf->loc, "flexible array member %qD does not have "
- "well defined padding bits for %qs",
- field, "__builtin_clear_padding");
+ if (!buf->clear_in_mask)
+ error_at (buf->loc, "flexible array member %qD does not "
+ "have well defined padding bits for %qs",
+ field, "__builtin_clear_padding");
}
else if (is_empty_type (TREE_TYPE (field)))
continue;
}
}
+/* Clear padding bits of TYPE in MASK. MASK is an indeterminate-bits
+ mask as filled in by native_encode_initializer; this clears the bits
+ corresponding to TYPE's padding so padding isn't reported as
+ uninitialized. Reuses the __builtin_clear_padding machinery with
+ clear_in_mask set, which makes clear_padding_flush clear padding
+ bits in union_ptr (here: MASK) instead of emitting code. */
+
+void
+clear_type_padding_in_mask (tree type, unsigned char *mask)
+{
+ clear_padding_struct buf;
+ buf.loc = UNKNOWN_LOCATION;
+ buf.clear_in_mask = true;
+ buf.base = NULL_TREE;
+ buf.alias_type = NULL_TREE;
+ buf.gsi = NULL;
+ buf.align = 0;
+ buf.off = 0;
+ buf.padding_bytes = 0;
+ buf.sz = int_size_in_bytes (type);
+ buf.size = 0;
+ buf.union_ptr = mask;
+ clear_padding_type (&buf, type, buf.sz);
+ /* Flush any still-pending padding bytes into MASK. */
+ clear_padding_flush (&buf, true);
+}
+
/* Fold __builtin_clear_padding builtin. */
static bool
gsi_prev (&gsiprev);
buf.loc = loc;
+ buf.clear_in_mask = false;
buf.base = ptr;
buf.alias_type = NULL_TREE;
buf.gsi = gsi;
enum tree_code, tree, tree);
extern tree maybe_fold_or_comparisons (tree, enum tree_code, tree, tree,
enum tree_code, tree, tree);
+extern void clear_type_padding_in_mask (tree, unsigned char *);
extern bool optimize_atomic_compare_exchange_p (gimple *);
extern void fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *);
extern bool arith_overflowed_p (enum tree_code, const_tree, const_tree,
--- /dev/null
+// { dg-do compile }
+// Valid uses of __builtin_bit_cast: trivially copyable source and
+// destination types of equal size (scalars, classes, arrays), each
+// guarded by a size check so the test is portable across targets.
+
+struct S { short a, b; };
+struct T { float a[16]; };
+struct U { int b[16]; };
+
+#if __SIZEOF_FLOAT__ == __SIZEOF_INT__
+// Scalar to scalar of the same size.
+int
+f1 (float x)
+{
+ return __builtin_bit_cast (int, x);
+}
+#endif
+
+#if 2 * __SIZEOF_SHORT__ == __SIZEOF_INT__
+// Scalar to class type and back.
+S
+f2 (int x)
+{
+ return __builtin_bit_cast (S, x);
+}
+
+int
+f3 (S x)
+{
+ return __builtin_bit_cast (int, x);
+}
+#endif
+
+#if __SIZEOF_FLOAT__ == __SIZEOF_INT__
+// Classes containing arrays; the source operand is a reference.
+U
+f4 (T &x)
+{
+ return __builtin_bit_cast (U, x);
+}
+
+// An array source operand, bound by reference.
+T
+f5 (int (&x)[16])
+{
+ return __builtin_bit_cast (T, x);
+}
+#endif
+
+// The result may be implicitly converted afterwards.
+int
+f6 ()
+{
+ return __builtin_bit_cast (unsigned char, (signed char) 0);
+}
--- /dev/null
+// { dg-do compile }
+// Invalid uses of __builtin_bit_cast: non-trivially-copyable or array
+// destination types, incomplete types and size mismatches.  The same
+// diagnostics must fire in plain code (foo), in an uninstantiated
+// template with non-dependent operands (bar), and at instantiation
+// time with dependent operands (baz, instantiated from qux).
+
+struct S { ~S (); int s; };
+S s;
+struct V; // { dg-message "forward declaration of 'struct V'" }
+extern V v; // { dg-error "'v' has incomplete type" }
+extern V *p;
+struct U { int a, b; };
+U u;
+
+void
+foo (int *q)
+{
+ __builtin_bit_cast (int, s); // { dg-error "'__builtin_bit_cast' source type 'S' is not trivially copyable" }
+ __builtin_bit_cast (S, 0); // { dg-error "'__builtin_bit_cast' destination type 'S' is not trivially copyable" }
+ __builtin_bit_cast (int &, q); // { dg-error "'__builtin_bit_cast' destination type 'int&' is not trivially copyable" }
+ __builtin_bit_cast (int [1], 0); // { dg-error "'__builtin_bit_cast' destination type \[^\n\r]* is an array type" }
+ __builtin_bit_cast (V, 0); // { dg-error "invalid use of incomplete type 'struct V'" }
+ __builtin_bit_cast (int, v);
+ __builtin_bit_cast (int, *p); // { dg-error "invalid use of incomplete type 'struct V'" }
+ __builtin_bit_cast (U, 0); // { dg-error "'__builtin_bit_cast' source size '\[0-9]*' not equal to destination type size '\[0-9]*'" }
+ __builtin_bit_cast (int, u); // { dg-error "'__builtin_bit_cast' source size '\[0-9]*' not equal to destination type size '\[0-9]*'" }
+}
+
+// Non-dependent operands: diagnosed even though bar is never
+// instantiated.
+template <int N>
+void
+bar (int *q)
+{
+ __builtin_bit_cast (int, s); // { dg-error "'__builtin_bit_cast' source type 'S' is not trivially copyable" }
+ __builtin_bit_cast (S, 0); // { dg-error "'__builtin_bit_cast' destination type 'S' is not trivially copyable" }
+ __builtin_bit_cast (int &, q); // { dg-error "'__builtin_bit_cast' destination type 'int&' is not trivially copyable" }
+ __builtin_bit_cast (int [1], 0); // { dg-error "'__builtin_bit_cast' destination type \[^\n\r]* is an array type" }
+ __builtin_bit_cast (V, 0); // { dg-error "invalid use of incomplete type 'struct V'" }
+ __builtin_bit_cast (int, *p); // { dg-error "invalid use of incomplete type 'struct V'" }
+ __builtin_bit_cast (U, 0); // { dg-error "'__builtin_bit_cast' source size '\[0-9]*' not equal to destination type size '\[0-9]*'" }
+ __builtin_bit_cast (int, u); // { dg-error "'__builtin_bit_cast' source size '\[0-9]*' not equal to destination type size '\[0-9]*'" }
+}
+
+// Dependent operands: diagnosed when baz is instantiated (see qux).
+template <typename T1, typename T2, typename T3, typename T4>
+void
+baz (T3 s, T4 *p, T1 *q)
+{
+ __builtin_bit_cast (int, s); // { dg-error "'__builtin_bit_cast' source type 'S' is not trivially copyable" }
+ __builtin_bit_cast (T3, 0); // { dg-error "'__builtin_bit_cast' destination type 'S' is not trivially copyable" }
+ __builtin_bit_cast (T1 &, q); // { dg-error "'__builtin_bit_cast' destination type 'int&' is not trivially copyable" }
+ __builtin_bit_cast (T2, 0); // { dg-error "'__builtin_bit_cast' destination type \[^\n\r]* is an array type" }
+ __builtin_bit_cast (T4, 0); // { dg-error "invalid use of incomplete type 'struct V'" }
+ __builtin_bit_cast (int, *p); // { dg-error "invalid use of incomplete type 'struct V'" }
+ __builtin_bit_cast (U, (T1) 0); // { dg-error "'__builtin_bit_cast' source size '\[0-9]*' not equal to destination type size '\[0-9]*'" }
+ __builtin_bit_cast (T1, u); // { dg-error "'__builtin_bit_cast' source size '\[0-9]*' not equal to destination type size '\[0-9]*'" }
+}
+
+void
+qux (int *q)
+{
+ baz <int, int [1], S, V> (s, p, q);
+}
--- /dev/null
+// { dg-do compile { target c++11 } }
+
+// Wrap __builtin_bit_cast so it is usable in constant expressions.
+template <typename To, typename From>
+constexpr To
+bit_cast (const From &from)
+{
+ return __builtin_bit_cast (To, from);
+}
+
+// Round-trip check: casting to To and back must reproduce the value.
+template <typename To, typename From>
+constexpr bool
+check (const From &from)
+{
+ return bit_cast <From> (bit_cast <To> (from)) == from;
+}
+
+// Aggregates with constexpr element-wise equality, used as bit_cast
+// source/destination types in the static_asserts below.
+
+struct A
+{
+ int a, b, c;
+ constexpr bool operator == (const A &x) const
+ {
+ return x.a == a && x.b == b && x.c == c;
+ }
+};
+
+struct B
+{
+ unsigned a[3];
+ constexpr bool operator == (const B &x) const
+ {
+ return x.a[0] == a[0] && x.a[1] == a[1] && x.a[2] == a[2];
+ }
+};
+
+// Multi-dimensional array member.
+struct C
+{
+ char a[2][3][2];
+ constexpr bool operator == (const C &x) const
+ {
+ return x.a[0][0][0] == a[0][0][0]
+ && x.a[0][0][1] == a[0][0][1]
+ && x.a[0][1][0] == a[0][1][0]
+ && x.a[0][1][1] == a[0][1][1]
+ && x.a[0][2][0] == a[0][2][0]
+ && x.a[0][2][1] == a[0][2][1]
+ && x.a[1][0][0] == a[1][0][0]
+ && x.a[1][0][1] == a[1][0][1]
+ && x.a[1][1][0] == a[1][1][0]
+ && x.a[1][1][1] == a[1][1][1]
+ && x.a[1][2][0] == a[1][2][0]
+ && x.a[1][2][1] == a[1][2][1];
+ }
+};
+
+struct D
+{
+ int a, b;
+ constexpr bool operator == (const D &x) const
+ {
+ return x.a == a && x.b == b;
+ }
+};
+
+// G exercises base classes, including an empty base E.
+struct E {};
+struct F { char c, d, e, f; };
+struct G : public D, E, F
+{
+ int g;
+ constexpr bool operator == (const G &x) const
+ {
+ return x.a == a && x.b == b && x.c == c && x.d == d
+ && x.e == e && x.f == f && x.g == g;
+ }
+};
+
+struct H
+{
+ int a, b[2], c;
+ constexpr bool operator == (const H &x) const
+ {
+ return x.a == a && x.b[0] == b[0] && x.b[1] == b[1] && x.c == c;
+ }
+};
+
+#if __SIZEOF_INT__ == 4
+// Bit-fields exactly filling one 32-bit unit.
+struct I
+{
+ int a;
+ int b : 3;
+ int c : 24;
+ int d : 5;
+ int e;
+ constexpr bool operator == (const I &x) const
+ {
+ return x.a == a && x.b == b && x.c == c && x.d == d && x.e == e;
+ }
+};
+#endif
+
+#if __SIZEOF_INT__ == 4 && __SIZEOF_LONG_LONG__ == 8
+// 64-bit bit-fields of varying widths.
+struct J
+{
+ long long int a, b : 11, c : 3, d : 37, e : 1, f : 10, g : 2, h;
+ constexpr bool operator == (const J &x) const
+ {
+ return x.a == a && x.b == b && x.c == c && x.d == d && x.e == e
+ && x.f == f && x.g == g && x.h == h;
+ }
+};
+
+struct K
+{
+ long long int a, c;
+ constexpr bool operator == (const K &x) const
+ {
+ return x.a == a && x.b == b && x.c == c;
+ }
+};
+
+// Mixed signed bit-fields and ordinary members.
+struct M
+{
+ signed a : 6, b : 7, c : 6, d : 5;
+ unsigned char e;
+ unsigned int f;
+ long long int g;
+ constexpr bool operator == (const M &x) const
+ {
+ return x.a == a && x.b == b && x.c == c && x.d == d && x.e == e
+ && x.f == f && x.g == g;
+ }
+};
+
+struct N
+{
+ unsigned long long int a, b;
+ constexpr bool operator == (const N &x) const
+ {
+ return x.a == a && x.b == b;
+ }
+};
+#endif
+
+// Scalar round-trips, including every char flavor.
+static_assert (check <unsigned int> (0), "");
+static_assert (check <long long int> (0xdeadbeeffeedbac1ULL), "");
+static_assert (check <signed char> ((unsigned char) 42), "");
+static_assert (check <char> ((unsigned char) 42), "");
+static_assert (check <unsigned char> ((unsigned char) 42), "");
+static_assert (check <signed char> ((signed char) 42), "");
+static_assert (check <char> ((signed char) 42), "");
+static_assert (check <unsigned char> ((signed char) 42), "");
+static_assert (check <signed char> ((char) 42), "");
+static_assert (check <char> ((char) 42), "");
+static_assert (check <unsigned char> ((char) 42), "");
+#if __SIZEOF_INT__ == __SIZEOF_FLOAT__
+static_assert (check <int> (2.5f), "");
+static_assert (check <unsigned int> (136.5f), "");
+#endif
+#if __SIZEOF_LONG_LONG__ == __SIZEOF_DOUBLE__
+static_assert (check <long long> (2.5), "");
+static_assert (check <long long unsigned> (123456.75), "");
+#endif
+
+// Aggregate round-trips.
+static_assert (check <B> (A{ 1, 2, 3 }), "");
+static_assert (check <A> (B{ 4, 5, 6 }), "");
+
+#if __SIZEOF_INT__ == 4
+static_assert (check <C> (A{ 7, 8, 9 }), "");
+static_assert (check <C> (B{ 10, 11, 12 }), "");
+static_assert (check <A> (C{ { { { 13, 14 }, { 15, 16 }, { 17, 18 } },
+ { { 19, 20 }, { 21, 22 }, { 23, 24 } } } }), "");
+// Exact byte layout, both endiannesses.
+constexpr unsigned char c[] = { 1, 2, 3, 4 };
+#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+static_assert (bit_cast <unsigned int> (c) == 0x04030201U, "");
+#elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
+static_assert (bit_cast <unsigned int> (c) == 0x01020304U, "");
+#endif
+
+#if __cplusplus >= 201703L
+static_assert (check <G> (H { 0x12345678, { 0x23456789, 0x5a876543 }, 0x3ba78654 }), "");
+#endif
+constexpr int d[] = { 0x12345678, 0x23456789, 0x5a876543, 0x3ba78654 };
+static_assert (bit_cast <G> (d) == bit_cast <G> (H { 0x12345678, { 0x23456789, 0x5a876543 }, 0x3ba78654 }), "");
+
+// Bit-field extraction from a plain aggregate and back.
+#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+static_assert (bit_cast <I> (A { 0x7efa3412, 0x5a876543, 0x1eeffeed })
+ == I { 0x7efa3412, 3, 0x50eca8, 0xb, 0x1eeffeed }, "");
+static_assert (bit_cast <A> (I { 0x7efa3412, 3, 0x50eca8, 0xb, 0x1eeffeed })
+ == A { 0x7efa3412, 0x5a876543, 0x1eeffeed }, "");
+#elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
+static_assert (bit_cast <I> (A { 0x7efa3412, 0x5a876543, 0x1eeffeed })
+ == I { 0x7efa3412, 2, -0x2bc4d6, 0x3, 0x1eeffeed }, "");
+static_assert (bit_cast <A> (I { 0x7efa3412, 2, -0x2bc4d6, 0x3, 0x1eeffeed })
+ == A { 0x7efa3412, 0x5a876543, 0x1eeffeed }, "");
+#endif
+#endif
+
+#if 2 * __SIZEOF_INT__ == __SIZEOF_LONG_LONG__ && __SIZEOF_INT__ >= 4
+constexpr unsigned long long a = 0xdeadbeeffee1deadULL;
+constexpr unsigned b[] = { 0xfeedbacU, 0xbeeffeedU };
+#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+static_assert (bit_cast <D> (a) == D { int (0xfee1deadU), int (0xdeadbeefU) }, "");
+static_assert (bit_cast <unsigned long long> (b) == 0xbeeffeed0feedbacULL, "");
+#elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
+static_assert (bit_cast <D> (a) == D { int (0xdeadbeefU), int (0xfee1deadU) }, "");
+static_assert (bit_cast <unsigned long long> (b) == 0x0feedbacbeeffeedULL, "");
+#endif
+#endif
+
+// 64-bit bit-field round-trips.
+#if __SIZEOF_INT__ == 4 && __SIZEOF_LONG_LONG__ == 8
+#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
+static_assert (bit_cast <J> (K { 0x0feedbacdeadbeefLL, 7862463375103529997LL, 0x0feedbacdeadbeefLL })
+ == J { 0x0feedbacdeadbeefLL, -1011, 2, -0xbacdeadbeLL, -1, -303, 1, 0x0feedbacdeadbeefLL }, "");
+static_assert (bit_cast <K> (J { 0x0feedbacdeadbeefLL, -1011, 2, -0xbacdeadbeLL, -1, -303, 1, 0x0feedbacdeadbeefLL })
+ == K { 0x0feedbacdeadbeefLL, 7862463375103529997LL, 0x0feedbacdeadbeefLL }, "");
+static_assert (bit_cast <M> (N { 0xfeedbacdeadbeef8ULL, 0x123456789abcde42ULL })
+ == M { -8, 59, 31, -5, 234, 0xfeedbacdU, 0x123456789abcde42ULL }, "");
+static_assert (bit_cast <N> (M { -8, 59, 31, -5, 234, 0xfeedbacdU, 0x123456789abcde42ULL })
+ == N { 0xfeedbacdeadbeef8ULL, 0x123456789abcde42ULL }, "");
+#elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
+static_assert (bit_cast <J> (K { 0x0feedbacdeadbeefLL, -9103311533965288635LL, 0x0feedbacdeadbeefLL })
+ == J { 0x0feedbacdeadbeefLL, -1011, 2, -0xbacdeadbeLL, -1, -303, 1, 0x0feedbacdeadbeefLL }, "");
+static_assert (bit_cast <K> (J { 0x0feedbacdeadbeefLL, -1011, 2, -0xbacdeadbeLL, -1, -303, 1, 0x0feedbacdeadbeefLL })
+ == K { 0x0feedbacdeadbeefLL, -9103311533965288635LL, 0x0feedbacdeadbeefLL }, "");
+static_assert (bit_cast <M> (N { 0xfeedbacdeadbeef8ULL, 0x123456789abcde42ULL })
+ == M { -1, -35, -19, -6, 205, 0xeadbeef8U, 0x123456789abcde42ULL }, "");
+static_assert (bit_cast <N> (M { -1, -35, -19, -6, 205, 0xeadbeef8U, 0x123456789abcde42ULL })
+ == N { 0xfeedbacdeadbeef8ULL, 0x123456789abcde42ULL }, "");
+#endif
+#endif
--- /dev/null
+// { dg-do compile { target c++11 } }
+
+template <typename To, typename From>
+constexpr To
+bit_cast (const From &from)
+{
+ return __builtin_bit_cast (To, from);
+}
+// NOTE: the directives below use absolute line 7 (the return statement
+// above) -- do not add or remove lines before this point.
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'U' is a union type" "U" { target *-*-* } 7 }
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'const U' is a union type" "const U" { target *-*-* } 7 }
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'B' contains a union type" "B" { target *-*-* } 7 }
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'char\\\*' is a pointer type" "char ptr" { target *-*-* } 7 }
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'const int\\\*' is a pointer type" "const int ptr" { target *-*-* } 7 }
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'C' contains a pointer type" "C" { target *-*-* } 7 }
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'const C' contains a pointer type" "const C" { target *-*-* } 7 }
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'int D::\\\*' is a pointer to member type" "ptrmem 1" { target *-*-* } 7 }
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'int \\\(D::\\\*\\\)\\\(\\\) const' is a pointer to member type" "ptrmem 2" { target *-*-* } 7 }
+// { dg-error "'__builtin_bit_cast' is not a constant expression because 'int \\\(D::\\\*\\\)\\\(\\\)' is a pointer to member type" "ptrmem 3" { target *-*-* } 7 }
+
+// Per [bit.cast]/3, a bit_cast is not a constant expression when either
+// type (or any subobject type) is a union, pointer, pointer to member,
+// or contains such a member.
+union U { int u; };
+struct A { int a; U b; };
+struct B : public A { int c; };
+struct C { const int *p; };
+constexpr int a[] = { 1, 2, 3 };
+constexpr const int *b = &a[0];
+constexpr C c = { b };
+struct D { int d; constexpr int foo () const { return 1; } };
+constexpr int D::*d = &D::d;
+constexpr int (D::*e) () const = &D::foo;
+struct E { __INTPTR_TYPE__ e, f; };
+constexpr E f = { 1, 2 };
+constexpr U g { 0 };
+
+// Each initializer below triggers one of the line-7 errors above.
+constexpr auto z = bit_cast <U> (0);
+constexpr auto y = bit_cast <int> (g);
+constexpr auto x = bit_cast <B> (a);
+constexpr auto w = bit_cast <char *> ((__INTPTR_TYPE__) 0);
+constexpr auto v = bit_cast <__UINTPTR_TYPE__> (b);
+constexpr auto u = bit_cast <C> ((__INTPTR_TYPE__) 0);
+constexpr auto t = bit_cast <__INTPTR_TYPE__> (c);
+constexpr auto s = bit_cast <__INTPTR_TYPE__> (d);
+constexpr auto r = bit_cast <E> (e);
+constexpr auto q = bit_cast <int D::*> ((__INTPTR_TYPE__) 0);
+constexpr auto p = bit_cast <int (D::*) ()> (f);
--- /dev/null
+// { dg-do compile { target { c++20 && { ilp32 || lp64 } } } }
+
+struct A { signed char a, b, c, d, e, f; };
+struct B {};
+struct C { B a, b; short c; B d; };
+struct D { int a : 4, b : 24, c : 4; };
+struct E { B a, b; short c; };
+struct F { B a; signed char b, c; B d; };
+
+constexpr bool
+f1 ()
+{
+ A a;
+ a.c = 23; a.d = 42;
+ C b = __builtin_bit_cast (C, a); // OK
+ return false;
+}
+
+constexpr bool
+f2 ()
+{
+ A a;
+ a.a = 1; a.b = 2; a.c = 3; a.e = 4; a.f = 5;
+ C b = __builtin_bit_cast (C, a); // { dg-error "'__builtin_bit_cast' accessing uninitialized byte at offset 3" }
+ return false;
+}
+
+constexpr bool
+f3 ()
+{
+ D a;
+ a.b = 1;
+ F b = __builtin_bit_cast (F, a); // OK
+ return false;
+}
+
+constexpr bool
+f4 ()
+{
+ D a;
+ a.b = 1; a.c = 2;
+ E b = __builtin_bit_cast (E, a); // OK
+ return false;
+}
+
+constexpr bool
+f5 ()
+{
+ D a;
+ a.b = 1;
+ E b = __builtin_bit_cast (E, a); // { dg-error "'__builtin_bit_cast' accessing uninitialized byte at offset 3" }
+ return false;
+}
+
+constexpr bool
+f6 ()
+{
+ D a;
+ a.c = 1;
+ E b = __builtin_bit_cast (E, a); // { dg-error "'__builtin_bit_cast' accessing uninitialized byte at offset 2" }
+ return false;
+}
+
+constexpr bool a = f1 ();
+constexpr bool b = f2 ();
+constexpr bool c = f3 ();
+constexpr bool d = f4 ();
+constexpr bool e = f5 ();
+constexpr bool f = f6 ();