/* Convert function calls to rtl insns, for GNU C compiler.
- Copyright (C) 1989-2017 Free Software Foundation, Inc.
+ Copyright (C) 1989-2020 Free Software Foundation, Inc.
This file is part of GCC.
#include "except.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
-#include "tree-chkp.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
-#include "rtl-chkp.h"
+#include "tree-ssa-strlen.h"
#include "intl.h"
#include "stringpool.h"
+#include "hash-map.h"
+#include "hash-traits.h"
#include "attribs.h"
#include "builtins.h"
+#include "gimple-fold.h"
/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
/* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
form for emit_group_move. */
rtx parallel_value;
- /* If value is passed in neither reg nor stack, this field holds a number
- of a special slot to be used. */
- rtx special_slot;
- /* For pointer bounds hold an index of parm bounds are bound to. -1 if
- there is no such pointer. */
- int pointer_arg;
- /* If pointer_arg refers a structure, then pointer_offset holds an offset
- of a pointer in this structure. */
- int pointer_offset;
/* If REG was promoted from the actual mode of the argument expression,
indicates whether the promotion is sign- or zero-extended. */
int unsignedp;
static int stack_arg_under_construction;
static void precompute_register_parameters (int, struct arg_data *, int *);
-static void store_bounds (struct arg_data *, struct arg_data *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
It is zero if this call doesn't want a structure value.
NEXT_ARG_REG is the rtx that results from executing
- targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
+ targetm.calls.function_arg (&args_so_far,
+ function_arg_info::end_marker ());
just after all the args have had their registers assigned.
This could be whatever you like, but normally it is the first
arg-register beyond those used for args in this call,
tree funtype ATTRIBUTE_UNUSED,
poly_int64 stack_size ATTRIBUTE_UNUSED,
poly_int64 rounded_stack_size,
- HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
+ poly_int64 struct_value_size ATTRIBUTE_UNUSED,
rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
next_arg_reg, NULL_RTX);
else
pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
- next_arg_reg, GEN_INT (struct_value_size));
+ next_arg_reg,
+ gen_int_mode (struct_value_size, Pmode));
}
/* If the target has "call" or "call_value" insns, then prefer them
if no arguments are actually popped. If the target does not have
next_arg_reg, NULL_RTX);
else
pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
- GEN_INT (struct_value_size));
+ gen_int_mode (struct_value_size, Pmode));
}
emit_insn (pat);
&& MEM_EXPR (funmem) != NULL_TREE)
set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));
- /* Mark instrumented calls. */
- if (call && fntree)
- CALL_EXPR_WITH_BOUNDS_P (call) = CALL_WITH_BOUNDS_P (fntree);
-
/* Put the register usage information there. */
add_function_usage_to (call_insn, call_fusage);
{
tree name_decl = DECL_NAME (fndecl);
- /* For instrumentation clones we want to derive flags
- from the original name. */
- if (cgraph_node::get (fndecl)
- && cgraph_node::get (fndecl)->instrumentation_clone)
- name_decl = DECL_NAME (cgraph_node::get (fndecl)->orig_decl);
-
if (fndecl && name_decl
&& IDENTIFIER_LENGTH (name_decl) <= 11
/* Exclude functions not at the file scope, or not `extern',
return false;
fndecl = gimple_call_fndecl (stmt);
- if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
+ if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
switch (DECL_FUNCTION_CODE (fndecl))
{
CASE_BUILT_IN_ALLOCA:
- return true;
+ return gimple_call_num_args (stmt) > 0;
default:
break;
}
return flags;
}
-/* Return true if TYPE should be passed by invisible reference. */
+/* Return true if ARG should be passed by invisible reference. */
bool
-pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
- tree type, bool named_arg)
+pass_by_reference (CUMULATIVE_ARGS *ca, function_arg_info arg)
{
- if (type)
+ if (tree type = arg.type)
{
/* If this type contains non-trivial constructors, then it is
forbidden for the middle-end to create any new copies. */
return true;
/* GCC post 3.4 passes *all* variable sized types by reference. */
- if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
+ if (!TYPE_SIZE (type) || !poly_int_tree_p (TYPE_SIZE (type)))
return true;
/* If a record type should be passed the same as its first (and only)
member, use the type and mode of that member. */
if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
{
- type = TREE_TYPE (first_field (type));
- mode = TYPE_MODE (type);
+ arg.type = TREE_TYPE (first_field (type));
+ arg.mode = TYPE_MODE (arg.type);
}
}
- return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
- type, named_arg);
+ return targetm.calls.pass_by_reference (pack_cumulative_args (ca), arg);
+}
+
+/* Return true if TYPE should be passed by reference when passed to
+ the "..." arguments of a function. */
+
+bool
+pass_va_arg_by_reference (tree type)
+{
+ return pass_by_reference (NULL, function_arg_info (type, /*named=*/false));
}
-/* Return true if TYPE, which is passed by reference, should be callee
+/* Decide whether ARG, which occurs in the state described by CA,
+ should be passed by reference. Return true if so and update
+ ARG accordingly. */
+
+bool
+apply_pass_by_reference_rules (CUMULATIVE_ARGS *ca, function_arg_info &arg)
+{
+ if (pass_by_reference (ca, arg))
+ {
+ arg.type = build_pointer_type (arg.type);
+ arg.mode = TYPE_MODE (arg.type);
+ arg.pass_by_reference = true;
+ return true;
+ }
+ return false;
+}
+
+/* Return true if ARG, which is passed by reference, should be callee
copied instead of caller copied. */
bool
-reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
- tree type, bool named_arg)
+reference_callee_copied (CUMULATIVE_ARGS *ca, const function_arg_info &arg)
{
- if (type && TREE_ADDRESSABLE (type))
+ if (arg.type && TREE_ADDRESSABLE (arg.type))
return false;
- return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
- named_arg);
+ return targetm.calls.callee_copies (pack_cumulative_args (ca), arg);
}
static tree
alloc_max_size (void)
{
- if (!alloc_object_size_limit)
- {
- alloc_object_size_limit = max_object_size ();
+ if (alloc_object_size_limit)
+ return alloc_object_size_limit;
- if (warn_alloc_size_limit)
- {
- char *end = NULL;
- errno = 0;
- unsigned HOST_WIDE_INT unit = 1;
- unsigned HOST_WIDE_INT limit
- = strtoull (warn_alloc_size_limit, &end, 10);
+ HOST_WIDE_INT limit = warn_alloc_size_limit;
+ if (limit == HOST_WIDE_INT_MAX)
+ limit = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
- if (!errno)
- {
- if (end && *end)
- {
- /* Numeric option arguments are at most INT_MAX. Make it
- possible to specify a larger value by accepting common
- suffixes. */
- if (!strcmp (end, "kB"))
- unit = 1000;
- else if (!strcasecmp (end, "KiB") || strcmp (end, "KB"))
- unit = 1024;
- else if (!strcmp (end, "MB"))
- unit = HOST_WIDE_INT_UC (1000) * 1000;
- else if (!strcasecmp (end, "MiB"))
- unit = HOST_WIDE_INT_UC (1024) * 1024;
- else if (!strcasecmp (end, "GB"))
- unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000;
- else if (!strcasecmp (end, "GiB"))
- unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024;
- else if (!strcasecmp (end, "TB"))
- unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000;
- else if (!strcasecmp (end, "TiB"))
- unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024;
- else if (!strcasecmp (end, "PB"))
- unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000;
- else if (!strcasecmp (end, "PiB"))
- unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024;
- else if (!strcasecmp (end, "EB"))
- unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000
- * 1000;
- else if (!strcasecmp (end, "EiB"))
- unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024
- * 1024;
- else
- unit = 0;
- }
+ alloc_object_size_limit = build_int_cst (size_type_node, limit);
- if (unit)
- {
- widest_int w = wi::mul (limit, unit);
- if (w < wi::to_widest (alloc_object_size_limit))
- alloc_object_size_limit
- = wide_int_to_tree (ptrdiff_type_node, w);
- }
- }
- }
- }
return alloc_object_size_limit;
}
bool
get_size_range (tree exp, tree range[2], bool allow_zero /* = false */)
{
+ if (!exp)
+ return false;
+
if (tree_fits_uhwi_p (exp))
{
/* EXP is a constant. */
bool integral = INTEGRAL_TYPE_P (exptype);
wide_int min, max;
- enum value_range_type range_type;
+ enum value_range_kind range_type;
- if (TREE_CODE (exp) == SSA_NAME && integral)
- range_type = get_range_info (exp, &min, &max);
+ if (integral)
+ range_type = determine_value_range (exp, &min, &max);
else
range_type = VR_VARYING;
/* Diagnose a call EXP to function FN decorated with attribute alloc_size
whose argument numbers given by IDX with values given by ARGS exceed
the maximum object size or cause an unsigned oveflow (wrapping) when
- multiplied. When ARGS[0] is null the function does nothing. ARGS[1]
- may be null for functions like malloc, and non-null for those like
- calloc that are decorated with a two-argument attribute alloc_size. */
+ multiplied. FN is null when EXP is a call via a function pointer.
+ When ARGS[0] is null the function does nothing. ARGS[1] may be null
+ for functions like malloc, and non-null for those like calloc that
+ are decorated with a two-argument attribute alloc_size. */
void
maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
location_t loc = EXPR_LOCATION (exp);
+ tree fntype = fn ? TREE_TYPE (fn) : TREE_TYPE (TREE_TYPE (exp));
bool warned = false;
/* Validate each argument individually. */
friends.
Also avoid issuing the warning for calls to function named
"alloca". */
- if ((DECL_FUNCTION_CODE (fn) == BUILT_IN_ALLOCA
- && IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6)
- || (DECL_FUNCTION_CODE (fn) != BUILT_IN_ALLOCA
- && !lookup_attribute ("returns_nonnull",
- TYPE_ATTRIBUTES (TREE_TYPE (fn)))))
+ if (fn && fndecl_built_in_p (fn, BUILT_IN_ALLOCA)
+ ? IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6
+ : !lookup_attribute ("returns_nonnull",
+ TYPE_ATTRIBUTES (fntype)))
warned = warning_at (loc, OPT_Walloc_zero,
"%Kargument %i value is zero",
exp, idx[i] + 1);
size overflow. There's no good way to detect C++98 here
so avoid diagnosing these calls for all C++ modes. */
if (i == 0
+ && fn
&& !args[1]
&& lang_GNU_CXX ()
- && DECL_IS_OPERATOR_NEW (fn)
+ && DECL_IS_OPERATOR_NEW_P (fn)
&& integer_all_onesp (args[i]))
continue;
wide_int x = wi::to_wide (argrange[0][0], szprec);
wide_int y = wi::to_wide (argrange[1][0], szprec);
- bool vflow;
+ wi::overflow_type vflow;
wide_int prod = wi::umul (x, y, &vflow);
if (vflow)
}
}
- if (warned)
+ if (warned && fn)
{
location_t fnloc = DECL_SOURCE_LOCATION (fn);
get_attr_nonstring_decl (tree expr, tree *ref)
{
tree decl = expr;
+ tree var = NULL_TREE;
if (TREE_CODE (decl) == SSA_NAME)
{
gimple *def = SSA_NAME_DEF_STMT (decl);
|| code == VAR_DECL)
decl = gimple_assign_rhs1 (def);
}
- else if (tree var = SSA_NAME_VAR (decl))
- decl = var;
+ else
+ var = SSA_NAME_VAR (decl);
}
if (TREE_CODE (decl) == ADDR_EXPR)
decl = TREE_OPERAND (decl, 0);
+ /* To simplify calling code, store the referenced DECL regardless of
+ the attribute determined below, but avoid storing the SSA_NAME_VAR
+ obtained above (it's not useful for dataflow purposes). */
if (ref)
*ref = decl;
- if (TREE_CODE (decl) == COMPONENT_REF)
+ /* Use the SSA_NAME_VAR that was determined above to see if it's
+ declared nonstring. Otherwise drill down into the referenced
+ DECL. */
+ if (var)
+ decl = var;
+ else if (TREE_CODE (decl) == ARRAY_REF)
+ decl = TREE_OPERAND (decl, 0);
+ else if (TREE_CODE (decl) == COMPONENT_REF)
decl = TREE_OPERAND (decl, 1);
+ else if (TREE_CODE (decl) == MEM_REF)
+ return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
if (DECL_P (decl)
&& lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
void
maybe_warn_nonstring_arg (tree fndecl, tree exp)
{
- if (!fndecl || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
+ if (!fndecl || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
return;
- bool with_bounds = CALL_WITH_BOUNDS_P (exp);
+ if (TREE_NO_WARNING (exp) || !warn_stringop_overflow)
+ return;
+
+ /* Avoid clearly invalid calls (more checking done below). */
+ unsigned nargs = call_expr_nargs (exp);
+ if (!nargs)
+ return;
/* The bound argument to a bounded string function like strncpy. */
tree bound = NULL_TREE;
+ /* The longest known or possible string argument to one of the comparison
+ functions. If the length is less than the bound it is used instead.
+ Since the length is only used for warning and not for code generation
+ disable strict mode in the calls to get_range_strlen below. */
+ tree maxlen = NULL_TREE;
+
/* It's safe to call "bounded" string functions with a non-string
argument since the functions provide an explicit bound for this
- purpose. */
- switch (DECL_FUNCTION_CODE (fndecl))
+ purpose. The exception is strncat where the bound may refer to
+ either the destination or the source. */
+ int fncode = DECL_FUNCTION_CODE (fndecl);
+ switch (fncode)
{
- case BUILT_IN_STPNCPY:
- case BUILT_IN_STPNCPY_CHK:
+ case BUILT_IN_STRCMP:
case BUILT_IN_STRNCMP:
case BUILT_IN_STRNCASECMP:
+ {
+ /* For these, if one argument refers to one or more of a set
+ of string constants or arrays of known size, determine
+ the range of their known or possible lengths and use it
+ conservatively as the bound for the unbounded function,
+ and to adjust the range of the bound of the bounded ones. */
+ for (unsigned argno = 0;
+ argno < MIN (nargs, 2)
+ && !(maxlen && TREE_CODE (maxlen) == INTEGER_CST); argno++)
+ {
+ tree arg = CALL_EXPR_ARG (exp, argno);
+ if (!get_attr_nonstring_decl (arg))
+ {
+ c_strlen_data lendata = { };
+ /* Set MAXBOUND to an arbitrary non-null non-integer
+ node as a request to have it set to the length of
+ the longest string in a PHI. */
+ lendata.maxbound = arg;
+ get_range_strlen (arg, &lendata, /* eltsize = */ 1);
+ maxlen = lendata.maxbound;
+ }
+ }
+ }
+ /* Fall through. */
+
+ case BUILT_IN_STRNCAT:
+ case BUILT_IN_STPNCPY:
case BUILT_IN_STRNCPY:
- case BUILT_IN_STRNCPY_CHK:
- bound = CALL_EXPR_ARG (exp, with_bounds ? 4 : 2);
+ if (nargs > 2)
+ bound = CALL_EXPR_ARG (exp, 2);
break;
case BUILT_IN_STRNDUP:
- bound = CALL_EXPR_ARG (exp, with_bounds ? 2 : 1);
+ if (nargs > 1)
+ bound = CALL_EXPR_ARG (exp, 1);
break;
+ case BUILT_IN_STRNLEN:
+ {
+ tree arg = CALL_EXPR_ARG (exp, 0);
+ if (!get_attr_nonstring_decl (arg))
+ {
+ c_strlen_data lendata = { };
+ /* Set MAXBOUND to an arbitrary non-null non-integer
+ node as a request to have it set to the length of
+ the longest string in a PHI. */
+ lendata.maxbound = arg;
+ get_range_strlen (arg, &lendata, /* eltsize = */ 1);
+ maxlen = lendata.maxbound;
+ }
+ if (nargs > 1)
+ bound = CALL_EXPR_ARG (exp, 1);
+ break;
+ }
+
default:
break;
}
/* Determine the range of the bound argument (if specified). */
tree bndrng[2] = { NULL_TREE, NULL_TREE };
if (bound)
- get_size_range (bound, bndrng);
+ {
+ STRIP_NOPS (bound);
+ get_size_range (bound, bndrng);
+ }
+
+ location_t loc = EXPR_LOCATION (exp);
+
+ if (bndrng[0])
+ {
+ /* Diagnose excessive bound prior the adjustment below and
+ regardless of attribute nonstring. */
+ tree maxobjsize = max_object_size ();
+ if (tree_int_cst_lt (maxobjsize, bndrng[0]))
+ {
+ if (tree_int_cst_equal (bndrng[0], bndrng[1]))
+ warning_at (loc, OPT_Wstringop_overflow_,
+ "%K%qD specified bound %E "
+ "exceeds maximum object size %E",
+ exp, fndecl, bndrng[0], maxobjsize);
+ else
+ warning_at (loc, OPT_Wstringop_overflow_,
+ "%K%qD specified bound [%E, %E] "
+ "exceeds maximum object size %E",
+ exp, fndecl, bndrng[0], bndrng[1], maxobjsize);
+ return;
+ }
+ }
+
+ if (maxlen && !integer_all_onesp (maxlen))
+ {
+ /* Add one for the nul. */
+ maxlen = const_binop (PLUS_EXPR, TREE_TYPE (maxlen), maxlen,
+ size_one_node);
+
+ if (!bndrng[0])
+ {
+ /* Conservatively use the upper bound of the lengths for
+ both the lower and the upper bound of the operation. */
+ bndrng[0] = maxlen;
+ bndrng[1] = maxlen;
+ bound = void_type_node;
+ }
+ else if (maxlen)
+ {
+ /* Replace the bound on the operation with the upper bound
+ of the length of the string if the latter is smaller. */
+ if (tree_int_cst_lt (maxlen, bndrng[0]))
+ bndrng[0] = maxlen;
+ else if (tree_int_cst_lt (maxlen, bndrng[1]))
+ bndrng[1] = maxlen;
+ }
+ }
/* Iterate over the built-in function's formal arguments and check
each const char* against the actual argument. If the actual
for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
{
+ /* Avoid iterating past the declared argument in a call
+ to function declared without a prototype. */
+ if (argno >= nargs)
+ break;
+
tree argtype = function_args_iter_cond (&it);
if (!argtype)
break;
if (!decl)
continue;
- tree type = TREE_TYPE (decl);
-
+ /* The maximum number of array elements accessed. */
offset_int wibnd = 0;
- if (bndrng[0])
+
+ if (argno && fncode == BUILT_IN_STRNCAT)
+ {
+ /* See if the bound in strncat is derived from the length
+ of the strlen of the destination (as it's expected to be).
+ If so, reset BOUND and FNCODE to trigger a warning. */
+ tree dstarg = CALL_EXPR_ARG (exp, 0);
+ if (is_strlen_related_p (dstarg, bound))
+ {
+ /* The bound applies to the destination, not to the source,
+ so reset these to trigger a warning without mentioning
+ the bound. */
+ bound = NULL;
+ fncode = 0;
+ }
+ else if (bndrng[1])
+ /* Use the upper bound of the range for strncat. */
+ wibnd = wi::to_offset (bndrng[1]);
+ }
+ else if (bndrng[0])
+ /* Use the lower bound of the range for functions other than
+ strncat. */
wibnd = wi::to_offset (bndrng[0]);
+ /* Determine the size of the argument array if it is one. */
offset_int asize = wibnd;
+ bool known_size = false;
+ tree type = TREE_TYPE (decl);
+ /* Determine the array size. For arrays of unknown bound and
+ pointers reset BOUND to trigger the appropriate warning. */
if (TREE_CODE (type) == ARRAY_TYPE)
- if (tree arrbnd = TYPE_DOMAIN (type))
- {
- if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
- asize = wi::to_offset (arrbnd) + 1;
- }
-
- location_t loc = EXPR_LOCATION (exp);
+ {
+ if (tree arrbnd = TYPE_DOMAIN (type))
+ {
+ if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
+ {
+ asize = wi::to_offset (arrbnd) + 1;
+ known_size = true;
+ }
+ }
+ else if (bound == void_type_node)
+ bound = NULL_TREE;
+ }
+ else if (bound == void_type_node)
+ bound = NULL_TREE;
+
+ /* In a call to strncat with a bound in a range whose lower but
+ not upper bound is less than the array size, reset ASIZE to
+ be the same as the bound and the other variable to trigger
+ the appropriate warning below. */
+ if (fncode == BUILT_IN_STRNCAT
+ && bndrng[0] != bndrng[1]
+ && wi::ltu_p (wi::to_offset (bndrng[0]), asize)
+ && (!known_size
+ || wi::ltu_p (asize, wibnd)))
+ {
+ asize = wibnd;
+ bound = NULL_TREE;
+ fncode = 0;
+ }
bool warned = false;
+ auto_diagnostic_group d;
if (wi::ltu_p (asize, wibnd))
- warned = warning_at (loc, OPT_Wstringop_overflow_,
- "%qD argument %i declared attribute %<nonstring%> "
- "is smaller than the specified bound %E",
- fndecl, argno + 1, bndrng[0]);
+ {
+ if (bndrng[0] == bndrng[1])
+ warned = warning_at (loc, OPT_Wstringop_overflow_,
+ "%qD argument %i declared attribute "
+ "%<nonstring%> is smaller than the specified "
+ "bound %wu",
+ fndecl, argno + 1, wibnd.to_uhwi ());
+ else if (wi::ltu_p (asize, wi::to_offset (bndrng[0])))
+ warned = warning_at (loc, OPT_Wstringop_overflow_,
+ "%qD argument %i declared attribute "
+ "%<nonstring%> is smaller than "
+ "the specified bound [%E, %E]",
+ fndecl, argno + 1, bndrng[0], bndrng[1]);
+ else
+ warned = warning_at (loc, OPT_Wstringop_overflow_,
+ "%qD argument %i declared attribute "
+ "%<nonstring%> may be smaller than "
+ "the specified bound [%E, %E]",
+ fndecl, argno + 1, bndrng[0], bndrng[1]);
+ }
+ else if (fncode == BUILT_IN_STRNCAT)
+ ; /* Avoid warning for calls to strncat() when the bound
+ is equal to the size of the non-string argument. */
else if (!bound)
warned = warning_at (loc, OPT_Wstringop_overflow_,
"%qD argument %i declared attribute %<nonstring%>",
error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
}
+/* Used to define rdwr_map below. */
+struct rdwr_access_hash: int_hash<int, -1> { };
+
+/* A mapping between argument number corresponding to attribute access
+ mode (read_only, write_only, or read_write) and operands. */
+typedef hash_map<rdwr_access_hash, attr_access> rdwr_map;
+
+/* Initialize a mapping for a call to function FNDECL declared with
+ attribute access. Each attribute positional operand inserts one
+ entry into the mapping with the operand number as the key. */
+
+static void
+init_attr_rdwr_indices (rdwr_map *rwm, tree fntype)
+{
+ if (!fntype)
+ return;
+
+ tree access = TYPE_ATTRIBUTES (fntype);
+ /* If the function's type has no attributes there's nothing to do. */
+ if (!access)
+ return;
+
+ access = lookup_attribute ("access", access);
+ if (!access)
+ return;
+
+ tree mode = TREE_VALUE (access);
+ gcc_assert (TREE_CODE (mode) == STRING_CST);
+ const char *modestr = TREE_STRING_POINTER (mode);
+ for (const char *m = modestr; *m; )
+ {
+ attr_access acc = { };
+
+ switch (*m)
+ {
+ case 'r': acc.mode = acc.read_only; break;
+ case 'w': acc.mode = acc.write_only; break;
+ default: acc.mode = acc.read_write; break;
+ }
+
+ char *end;
+ acc.ptrarg = strtoul (++m, &end, 10);
+ m = end;
+ if (*m == ',')
+ {
+ acc.sizarg = strtoul (++m, &end, 10);
+ m = end;
+ }
+ else
+ acc.sizarg = UINT_MAX;
+
+ acc.ptr = NULL_TREE;
+ acc.size = NULL_TREE;
+
+ /* Unconditionally add an entry for the required pointer
+ operand of the attribute, and one for the optional size
+ operand when it's specified. */
+ rwm->put (acc.ptrarg, acc);
+ if (acc.sizarg != UINT_MAX)
+ rwm->put (acc.sizarg, acc);
+ }
+}
+
+/* Returns the type of the argument ARGNO to function with type FNTYPE
+ or null when the type cannot be determined or no such argument exists. */
+
+static tree
+fntype_argno_type (tree fntype, unsigned argno)
+{
+ /* Without a prototype the argument types are unknown. */
+ if (!prototype_p (fntype))
+ return NULL_TREE;
+
+ tree argtype;
+ function_args_iterator it;
+ FOREACH_FUNCTION_ARGS (fntype, argtype, it)
+ if (argno-- == 0)
+ return argtype;
+
+ /* ARGNO is past the last declared argument. */
+ return NULL_TREE;
+}
+
+/* Helper to append the "rdwr" attribute specification described
+ by ACCESS to the array ATTRSTR with size STRSIZE. Used in
+ diagnostics. */
+
+static inline void
+append_attrname (const std::pair<int, attr_access> &access,
+ char *attrstr, size_t strsize)
+{
+ /* Append the relevant attribute to the string. This (deliberately)
+ appends the attribute pointer operand even when none was specified. */
+ size_t len = strlen (attrstr);
+
+ const char *atname
+ = (access.second.mode == attr_access::read_only
+ ? "read_only"
+ : (access.second.mode == attr_access::write_only
+ ? "write_only" : "read_write"));
+
+ const char *sep = len ? ", " : "";
+
+ if (access.second.sizarg == UINT_MAX)
+ snprintf (attrstr + len, strsize - len,
+ "%s%s (%i)", sep, atname,
+ access.second.ptrarg + 1);
+ else
+ snprintf (attrstr + len, strsize - len,
+ "%s%s (%i, %i)", sep, atname,
+ access.second.ptrarg + 1, access.second.sizarg + 1);
+}
+
+/* Iterate over attribute access read-only, read-write, and write-only
+ arguments and diagnose past-the-end accesses and related problems
+ in the function call EXP. */
+
+static void
+maybe_warn_rdwr_sizes (rdwr_map *rwm, tree exp)
+{
+ tree fndecl = NULL_TREE;
+ tree fntype = NULL_TREE;
+ if (tree fnaddr = CALL_EXPR_FN (exp))
+ {
+ if (TREE_CODE (fnaddr) == ADDR_EXPR)
+ {
+ fndecl = TREE_OPERAND (fnaddr, 0);
+ fntype = TREE_TYPE (fndecl);
+ }
+ else
+ fntype = TREE_TYPE (TREE_TYPE (fnaddr));
+ }
+
+ if (!fntype)
+ return;
+
+ /* A string describing the attributes that the warnings issued by this
+ function apply to. Used to print one informational note per function
+ call, rather than one per warning. That reduces clutter. */
+ char attrstr[80];
+ attrstr[0] = 0;
+
+ for (rdwr_map::iterator it = rwm->begin (); it != rwm->end (); ++it)
+ {
+ std::pair<int, attr_access> access = *it;
+
+ /* Get the function call arguments corresponding to the attribute's
+ positional arguments. When both arguments have been specified
+ there will be two entries in *RWM, one for each. They are
+ cross-referenced by their respective argument numbers in
+ ACCESS.PTRARG and ACCESS.SIZARG. */
+ const int ptridx = access.second.ptrarg;
+ const int sizidx = access.second.sizarg;
+
+ gcc_assert (ptridx != -1);
+ gcc_assert (access.first == ptridx || access.first == sizidx);
+
+ /* The pointer is set to null for the entry corresponding to
+ the size argument. Skip it. It's handled when the entry
+ corresponding to the pointer argument comes up. */
+ if (!access.second.ptr)
+ continue;
+
+ tree argtype = fntype_argno_type (fntype, ptridx);
+ argtype = TREE_TYPE (argtype);
+
+ tree size;
+ if (sizidx == -1)
+ {
+ /* If only the pointer attribute operand was specified
+ and not size, set SIZE to the size of one element of
+ the pointed to type to detect smaller objects (null
+ pointers are diagnosed in this case only if
+ the pointer is also declared with attribute nonnull). */
+ size = size_one_node;
+ }
+ else
+ size = rwm->get (sizidx)->size;
+
+ tree ptr = access.second.ptr;
+ tree sizrng[2] = { size_zero_node, build_all_ones_cst (sizetype) };
+ if (get_size_range (size, sizrng, true)
+ && tree_int_cst_sgn (sizrng[0]) < 0
+ && tree_int_cst_sgn (sizrng[1]) < 0)
+ {
+ /* Warn about negative sizes. */
+ bool warned = false;
+ location_t loc = EXPR_LOCATION (exp);
+ if (tree_int_cst_equal (sizrng[0], sizrng[1]))
+ warned = warning_at (loc, OPT_Wstringop_overflow_,
+ "%Kargument %i value %E is negative",
+ exp, sizidx + 1, size);
+ else
+ warned = warning_at (loc, OPT_Wstringop_overflow_,
+ "%Kargument %i range [%E, %E] is negative",
+ exp, sizidx + 1, sizrng[0], sizrng[1]);
+ if (warned)
+ {
+ append_attrname (access, attrstr, sizeof attrstr);
+ /* Avoid warning again for the same attribute. */
+ continue;
+ }
+ }
+
+ if (tree_int_cst_sgn (sizrng[0]) >= 0)
+ {
+ if (COMPLETE_TYPE_P (argtype))
+ {
+ /* Multiply SIZE by the size of the type the pointer
+ argument points to. If it's incomplete the size
+ is used as is. */
+ size = NULL_TREE;
+ if (tree argsize = TYPE_SIZE_UNIT (argtype))
+ if (TREE_CODE (argsize) == INTEGER_CST)
+ {
+ const int prec = TYPE_PRECISION (sizetype);
+ wide_int minsize = wi::to_wide (sizrng[0], prec);
+ minsize *= wi::to_wide (argsize, prec);
+ size = wide_int_to_tree (sizetype, minsize);
+ }
+ }
+ }
+ else
+ size = NULL_TREE;
+
+ if (sizidx >= 0
+ && integer_zerop (ptr)
+ && tree_int_cst_sgn (sizrng[0]) > 0)
+ {
+ /* Warn about null pointers with positive sizes. This is
+ different from also declaring the pointer argument with
+ attribute nonnull when the function accepts null pointers
+ only when the corresponding size is zero. */
+ bool warned = false;
+ location_t loc = EXPR_LOCATION (exp);
+ if (tree_int_cst_equal (sizrng[0], sizrng[1]))
+ warned = warning_at (loc, OPT_Wnonnull,
+ "%Kargument %i is null but the corresponding "
+ "size argument %i value is %E",
+ exp, ptridx + 1, sizidx + 1, size);
+ else
+ warned = warning_at (loc, OPT_Wnonnull,
+ "%Kargument %i is null but the corresponding "
+ "size argument %i range is [%E, %E]",
+ exp, ptridx + 1, sizidx + 1,
+ sizrng[0], sizrng[1]);
+ if (warned)
+ {
+ append_attrname (access, attrstr, sizeof attrstr);
+ /* Avoid warning again for the same attribute. */
+ continue;
+ }
+ }
+
+ tree objsize = compute_objsize (ptr, 0);
+
+ tree srcsize;
+ if (access.second.mode == attr_access::write_only)
+ {
+ /* For a write-only argument there is no source. */
+ srcsize = NULL_TREE;
+ }
+ else
+ {
+ /* For read-only and read-write attributes also set the source
+ size. */
+ srcsize = objsize;
+ if (access.second.mode == attr_access::read_only)
+ {
+ /* For a read-only attribute there is no destination so
+ clear OBJSIZE. This emits "reading N bytes" kind of
+ diagnostics instead of the "writing N bytes" kind. */
+ objsize = NULL_TREE;
+ }
+ }
+
+ /* Clear the no-warning bit in case it was set in a prior
+ iteration so that accesses via different arguments are
+ diagnosed. */
+ TREE_NO_WARNING (exp) = false;
+ check_access (exp, NULL_TREE, NULL_TREE, size, /*maxread=*/ NULL_TREE,
+ srcsize, objsize);
+
+ if (TREE_NO_WARNING (exp))
+ /* If check_access issued a warning above, append the relevant
+ attribute to the string. */
+ append_attrname (access, attrstr, sizeof attrstr);
+ }
+
+ if (!*attrstr)
+ return;
+
+ if (fndecl)
+ inform (DECL_SOURCE_LOCATION (fndecl),
+ "in a call to function %qD declared with attribute %qs",
+ fndecl, attrstr);
+ else
+ /* FNDECL is null in this branch (the if above failed), so use
+ the call expression's location, not EXPR_LOCATION (fndecl),
+ which would dereference a null tree. */
+ inform (EXPR_LOCATION (exp),
+ "in a call with type %qT and attribute %qs",
+ fntype, attrstr);
+
+ /* Set the bit in case it was cleared and not set above. */
+ TREE_NO_WARNING (exp) = true;
+}
+
/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
CALL_EXPR EXP.
i = num_actuals - 1;
{
- int j = i, ptr_arg = -1;
+ int j = i;
call_expr_arg_iterator iter;
tree arg;
bitmap slots = NULL;
{
args[j].tree_value = struct_value_addr_value;
j--;
-
- /* If we pass structure address then we need to
- create bounds for it. Since created bounds is
- a call statement, we expand it right here to avoid
- fixing all other places where it may be expanded. */
- if (CALL_WITH_BOUNDS_P (exp))
- {
- args[j].value = gen_reg_rtx (targetm.chkp_bound_mode ());
- args[j].tree_value
- = chkp_make_bounds_for_struct_addr (struct_value_addr_value);
- expand_expr_real (args[j].tree_value, args[j].value, VOIDmode,
- EXPAND_NORMAL, 0, false);
- args[j].pointer_arg = j + 1;
- j--;
- }
}
argpos = 0;
FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
{
tree argtype = TREE_TYPE (arg);
- /* Remember last param with pointer and associate it
- with following pointer bounds. */
- if (CALL_WITH_BOUNDS_P (exp)
- && chkp_type_has_pointer (argtype))
- {
- if (slots)
- BITMAP_FREE (slots);
- ptr_arg = j;
- if (!BOUNDED_TYPE_P (argtype))
- {
- slots = BITMAP_ALLOC (NULL);
- chkp_find_bound_slots (argtype, slots);
- }
- }
- else if (CALL_WITH_BOUNDS_P (exp)
- && pass_by_reference (NULL, TYPE_MODE (argtype), argtype,
- argpos < n_named_args))
- {
- if (slots)
- BITMAP_FREE (slots);
- ptr_arg = j;
- }
- else if (POINTER_BOUNDS_TYPE_P (argtype))
- {
- /* We expect bounds in instrumented calls only.
- Otherwise it is a sign we lost flag due to some optimization
- and may emit call args incorrectly. */
- gcc_assert (CALL_WITH_BOUNDS_P (exp));
-
- /* For structures look for the next available pointer. */
- if (ptr_arg != -1 && slots)
- {
- unsigned bnd_no = bitmap_first_set_bit (slots);
- args[j].pointer_offset =
- bnd_no * POINTER_SIZE / BITS_PER_UNIT;
-
- bitmap_clear_bit (slots, bnd_no);
-
- /* Check we have no more pointers in the structure. */
- if (bitmap_empty_p (slots))
- BITMAP_FREE (slots);
- }
- args[j].pointer_arg = ptr_arg;
-
- /* Check we covered all pointers in the previous
- non bounds arg. */
- if (!slots)
- ptr_arg = -1;
- }
- else
- ptr_arg = -1;
-
if (targetm.calls.split_complex_arg
&& argtype
&& TREE_CODE (argtype) == COMPLEX_TYPE
bitmap_obstack_release (NULL);
- /* Extract attribute alloc_size and if set, store the indices of
- the corresponding arguments in ALLOC_IDX, and then the actual
- argument(s) at those indices in ALLOC_ARGS. */
+ /* Extract attribute alloc_size from the type of the called expression
+ (which could be a function or a function pointer) and if set, store
+ the indices of the corresponding arguments in ALLOC_IDX, and then
+ the actual argument(s) at those indices in ALLOC_ARGS. */
int alloc_idx[2] = { -1, -1 };
- if (tree alloc_size
- = (fndecl ? lookup_attribute ("alloc_size",
- TYPE_ATTRIBUTES (TREE_TYPE (fndecl)))
- : NULL_TREE))
+ if (tree alloc_size = lookup_attribute ("alloc_size",
+ TYPE_ATTRIBUTES (fntype)))
{
tree args = TREE_VALUE (alloc_size);
alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
/* Array for up to the two attribute alloc_size arguments. */
tree alloc_args[] = { NULL_TREE, NULL_TREE };
+ /* Map of attribute read_only, write_only, or read_write specifications
+ for function arguments. */
+ rdwr_map rdwr_idx;
+ init_attr_rdwr_indices (&rdwr_idx, fntype);
+
/* I counts args in order (to be) pushed; ARGPOS counts in order written. */
for (argpos = 0; argpos < num_actuals; i--, argpos++)
{
tree type = TREE_TYPE (args[i].tree_value);
int unsignedp;
- machine_mode mode;
/* Replace erroneous argument with constant zero. */
if (type == error_mark_node || !COMPLETE_TYPE_P (type))
/* If TYPE is a transparent union or record, pass things the way
we would pass the first field of the union or record. We have
already verified that the modes are the same. */
- if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
- && TYPE_TRANSPARENT_AGGR (type))
+ if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
type = TREE_TYPE (first_field (type));
/* Decide where to pass this arg.
with those made by function.c. */
/* See if this argument should be passed by invisible reference. */
- if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
- type, argpos < n_named_args))
+ function_arg_info arg (type, argpos < n_named_args);
+ if (pass_by_reference (args_so_far_pnt, arg))
{
bool callee_copies;
tree base = NULL_TREE;
- callee_copies
- = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
- type, argpos < n_named_args);
+ callee_copies = reference_callee_copied (args_so_far_pnt, arg);
/* If we're compiling a thunk, pass through invisible references
instead of making a copy. */
*may_tailcall = false;
maybe_complain_about_tail_call (exp,
"a callee-copied argument is"
- " stored in the current "
+ " stored in the current"
" function's frame");
}
"argument must be passed"
" by copying");
}
+ arg.pass_by_reference = true;
}
unsignedp = TYPE_UNSIGNED (type);
- mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
- fndecl ? TREE_TYPE (fndecl) : fntype, 0);
+ arg.type = type;
+ arg.mode
+ = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
+ fndecl ? TREE_TYPE (fndecl) : fntype, 0);
args[i].unsignedp = unsignedp;
- args[i].mode = mode;
+ args[i].mode = arg.mode;
targetm.calls.warn_parameter_passing_abi (args_so_far, type);
- args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
- argpos < n_named_args);
+ args[i].reg = targetm.calls.function_arg (args_so_far, arg);
if (args[i].reg && CONST_INT_P (args[i].reg))
- {
- args[i].special_slot = args[i].reg;
- args[i].reg = NULL;
- }
+ args[i].reg = NULL;
/* If this is a sibling call and the machine has register windows, the
register window has to be unwinded before calling the routine, so
arguments have to go into the incoming registers. */
if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
args[i].tail_call_reg
- = targetm.calls.function_incoming_arg (args_so_far, mode, type,
- argpos < n_named_args);
+ = targetm.calls.function_incoming_arg (args_so_far, arg);
else
args[i].tail_call_reg = args[i].reg;
if (args[i].reg)
- args[i].partial
- = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
- argpos < n_named_args);
+ args[i].partial = targetm.calls.arg_partial_bytes (args_so_far, arg);
- args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
+ args[i].pass_on_stack = targetm.calls.must_pass_in_stack (arg);
/* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
it means that we are to pass this arg in the register(s) designated
|| (args[i].pass_on_stack && args[i].reg != 0))
*must_preallocate = 1;
- /* No stack allocation and padding for bounds. */
- if (POINTER_BOUNDS_P (args[i].tree_value))
- ;
/* Compute the stack-size of this argument. */
- else if (args[i].reg == 0 || args[i].partial != 0
+ if (args[i].reg == 0 || args[i].partial != 0
|| reg_parm_stack_space > 0
|| args[i].pass_on_stack)
- locate_and_pad_parm (mode, type,
+ locate_and_pad_parm (arg.mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
1,
#else
/* The argument is passed entirely in registers. See at which
end it should be padded. */
args[i].locate.where_pad =
- BLOCK_REG_PADDING (mode, type,
+ BLOCK_REG_PADDING (arg.mode, type,
int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif
/* Increment ARGS_SO_FAR, which has info about which arg-registers
have been used, etc. */
- targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
- type, argpos < n_named_args);
+ /* ??? Traditionally we've passed TYPE_MODE here, instead of the
+ promoted_mode used for function_arg above. However, the
+ corresponding handling of incoming arguments in function.c
+ does pass the promoted mode. */
+ arg.mode = TYPE_MODE (type);
+ targetm.calls.function_arg_advance (args_so_far, arg);
/* Store argument values for functions decorated with attribute
alloc_size. */
alloc_args[0] = args[i].tree_value;
else if (argpos == alloc_idx[1])
alloc_args[1] = args[i].tree_value;
+
+ /* Save the actual argument that corresponds to the access attribute
+ operand for later processing. */
+ if (attr_access *access = rdwr_idx.get (argpos))
+ {
+ if (POINTER_TYPE_P (type))
+ {
+ access->ptr = args[i].tree_value;
+ gcc_assert (access->size == NULL_TREE);
+ }
+ else
+ {
+ access->size = args[i].tree_value;
+ gcc_assert (access->ptr == NULL_TREE);
+ }
+ }
}
if (alloc_args[0])
/* Detect passing non-string arguments to functions expecting
nul-terminated strings. */
maybe_warn_nonstring_arg (fndecl, exp);
+
+ /* Check read_only, write_only, and read_write arguments. */
+ maybe_warn_rdwr_sizes (&rdwr_idx, exp);
}
/* Update ARGS_SIZE to contain the total size for the argument block.
partial_seen = 1;
else if (partial_seen && args[i].reg == 0)
must_preallocate = 1;
- /* We preallocate in case there are bounds passed
- in the bounds table to have precomputed address
- for bounds association. */
- else if (POINTER_BOUNDS_P (args[i].tree_value)
- && !args[i].reg)
- must_preallocate = 1;
if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
&& (TREE_CODE (args[i].tree_value) == CALL_EXPR
if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
continue;
- /* Pointer Bounds are never passed on the stack. */
- if (POINTER_BOUNDS_P (args[i].tree_value))
- continue;
-
addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
addr = plus_constant (Pmode, addr, arg_offset);
{
int partial = args[i].partial;
int nregs;
- int size = 0;
+ poly_int64 size = 0;
+ HOST_WIDE_INT const_size = 0;
rtx_insn *before_arg = get_last_insn ();
+ tree type = TREE_TYPE (args[i].tree_value);
+ if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
+ type = TREE_TYPE (first_field (type));
/* Set non-negative if we must move a word at a time, even if
just one word (e.g, partial == 4 && mode == DFmode). Set
to -1 if we just use a normal move insn. This value can be
gcc_assert (partial % UNITS_PER_WORD == 0);
nregs = partial / UNITS_PER_WORD;
}
- else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
+ else if (TYPE_MODE (type) == BLKmode)
{
- size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
- nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
+ /* Variable-sized parameters should be described by a
+ PARALLEL instead. */
+ const_size = int_size_in_bytes (type);
+ gcc_assert (const_size >= 0);
+ nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
+ size = const_size;
}
else
size = GET_MODE_SIZE (args[i].mode);
/* Handle case where we have a value that needs shifting
up to the msb. eg. a QImode value and we're padding
upward on a BYTES_BIG_ENDIAN machine. */
- if (size < UNITS_PER_WORD
- && (args[i].locate.where_pad
- == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
+ if (args[i].locate.where_pad
+ == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
{
- rtx x;
- int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
-
- /* Assigning REG here rather than a temp makes CALL_FUSAGE
- report the whole reg as used. Strictly speaking, the
- call only uses SIZE bytes at the msb end, but it doesn't
- seem worth generating rtl to say that. */
- reg = gen_rtx_REG (word_mode, REGNO (reg));
- x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
- if (x != reg)
- emit_move_insn (reg, x);
+ gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
+ if (maybe_lt (size, UNITS_PER_WORD))
+ {
+ rtx x;
+ poly_int64 shift
+ = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
+
+ /* Assigning REG here rather than a temp makes
+ CALL_FUSAGE report the whole reg as used.
+ Strictly speaking, the call only uses SIZE
+ bytes at the msb end, but it doesn't seem worth
+ generating rtl to say that. */
+ reg = gen_rtx_REG (word_mode, REGNO (reg));
+ x = expand_shift (LSHIFT_EXPR, word_mode,
+ reg, shift, reg, 1);
+ if (x != reg)
+ emit_move_insn (reg, x);
+ }
}
#endif
}
else if (partial == 0 || args[i].pass_on_stack)
{
+ /* SIZE and CONST_SIZE are 0 for partial arguments and
+ the size of a BLKmode type otherwise. */
+ gcc_checking_assert (known_eq (size, const_size));
rtx mem = validize_mem (copy_rtx (args[i].value));
/* Check for overlap with already clobbered argument area,
providing that this has non-zero size. */
if (is_sibcall
- && size != 0
+ && const_size != 0
&& (mem_might_overlap_already_clobbered_arg_p
- (XEXP (args[i].value, 0), size)))
+ (XEXP (args[i].value, 0), const_size)))
*sibcall_failure = 1;
- if (size % UNITS_PER_WORD == 0
+ if (const_size % UNITS_PER_WORD == 0
|| MEM_ALIGN (mem) % BITS_PER_WORD == 0)
move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
else
args[i].mode);
rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
- unsigned int bitsize = size * BITS_PER_UNIT - bitoff;
+ unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
word_mode, word_mode, false,
NULL);
}
/* Handle a BLKmode that needs shifting. */
- if (nregs == 1 && size < UNITS_PER_WORD
+ if (nregs == 1 && const_size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
&& args[i].locate.where_pad == PAD_DOWNWARD
#else
)
{
rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
- int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
+ int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
enum tree_code dir = (BYTES_BIG_ENDIAN
? RSHIFT_EXPR : LSHIFT_EXPR);
rtx x;
if (GET_CODE (reg) == PARALLEL)
use_group_regs (call_fusage, reg);
else if (nregs == -1)
- use_reg_mode (call_fusage, reg,
- TYPE_MODE (TREE_TYPE (args[i].tree_value)));
+ use_reg_mode (call_fusage, reg, TYPE_MODE (type));
else if (nregs > 0)
use_regs (call_fusage, REGNO (reg), nregs);
}
bool
shift_return_value (machine_mode mode, bool left_p, rtx value)
{
- HOST_WIDE_INT shift;
-
gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
machine_mode value_mode = GET_MODE (value);
- shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
- if (shift == 0)
+ poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
+
+ if (known_eq (shift, 0))
return false;
/* Use ashr rather than lshr for right shifts. This is for the benefit
}
#ifdef REG_PARM_STACK_SPACE
- /* If outgoing reg parm stack space changes, we can not do sibcall. */
+ /* If outgoing reg parm stack space changes, we cannot do sibcall. */
if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
!= OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
|| (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
return true;
}
+/* Update stack alignment when the parameter is passed in the stack
+   since the outgoing parameter requires extra alignment on the calling
+   function side.  */
+
+static void
+update_stack_alignment_for_call (struct locate_and_pad_arg_data *locate)
+{
+  /* Raise both recorded alignments monotonically: an argument slot's
+     boundary can only increase the caller's requirement, never relax it.  */
+  if (crtl->stack_alignment_needed < locate->boundary)
+    crtl->stack_alignment_needed = locate->boundary;
+  if (crtl->preferred_stack_boundary < locate->boundary)
+    crtl->preferred_stack_boundary = locate->boundary;
+}
+
/* Generate all the code for a CALL_EXPR exp
and return an rtx for its value.
Store the value in TARGET (specified as an rtx) if convenient.
/* Register in which non-BLKmode value will be returned,
or 0 if no value or if value is BLKmode. */
rtx valreg;
- /* Register(s) in which bounds are returned. */
- rtx valbnd = NULL;
/* Address where we should return a BLKmode value;
0 if value not BLKmode. */
rtx structure_value_addr = 0;
/* Size of aggregate value wanted, or zero if none wanted
or if we are using the non-reentrant PCC calling convention
or expecting the value in registers. */
- HOST_WIDE_INT struct_value_size = 0;
+ poly_int64 struct_value_size = 0;
/* Nonzero if called function returns an aggregate in memory PCC style,
by returning the address of where to find it. */
int pcc_struct_value = 0;
}
#else /* not PCC_STATIC_STRUCT_RETURN */
{
- struct_value_size = int_size_in_bytes (rettype);
+ if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
+ struct_value_size = -1;
/* Even if it is semantically safe to use the target as the return
slot, it may be not sufficiently aligned for the return type. */
if (CALL_EXPR_RETURN_SLOT_OPT (exp)
&& target
&& MEM_P (target)
- && !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
- && targetm.slow_unaligned_access (TYPE_MODE (rettype),
- MEM_ALIGN (target))))
+ /* If rettype is addressable, we may not create a temporary.
+ If target is properly aligned at runtime and the compiler
+ just doesn't know about it, it will work fine, otherwise it
+ will be UB. */
+ && (TREE_ADDRESSABLE (rettype)
+ || !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
+ && targetm.slow_unaligned_access (TYPE_MODE (rettype),
+ MEM_ALIGN (target)))))
structure_value_addr = XEXP (target, 0);
else
{
structure_value_addr_value =
make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
- structure_value_addr_parm = CALL_WITH_BOUNDS_P (exp) ? 2 : 1;
+ structure_value_addr_parm = 1;
}
/* Count the arguments and set NUM_ACTUALS. */
pushed these optimizations into -O2. Don't try if we're already
expanding a call, as that means we're an argument. Don't try if
there's cleanups, as we know there's code to follow the call. */
-
if (currently_expanding_call++ != 0
- || !flag_optimize_sibling_calls
+ || (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp))
|| args_size.var
|| dbg_cnt (tail_call) == false)
try_tail_call = 0;
+ /* Workaround buggy C/C++ wrappers around Fortran routines with
+ character(len=constant) arguments if the hidden string length arguments
+ are passed on the stack; if the callers forget to pass those arguments,
+ attempting to tail call in such routines leads to stack corruption.
+ Avoid tail calls in functions where at least one such hidden string
+ length argument is passed (partially or fully) on the stack in the
+ caller and the callee needs to pass any arguments on the stack.
+ See PR90329. */
+ if (try_tail_call && maybe_ne (args_size.constant, 0))
+ for (tree arg = DECL_ARGUMENTS (current_function_decl);
+ arg; arg = DECL_CHAIN (arg))
+ if (DECL_HIDDEN_STRING_LENGTH (arg) && DECL_INCOMING_RTL (arg))
+ {
+ subrtx_iterator::array_type array;
+ FOR_EACH_SUBRTX (iter, array, DECL_INCOMING_RTL (arg), NONCONST)
+ if (MEM_P (*iter))
+ {
+ try_tail_call = 0;
+ break;
+ }
+ }
+
/* If the user has marked the function as requiring tail-call
optimization, attempt it. */
if (must_tail_call)
/* Ensure current function's preferred stack boundary is at least
what we need. Stack alignment may also increase preferred stack
boundary. */
+ for (i = 0; i < num_actuals; i++)
+ if (reg_parm_stack_space > 0
+ || args[i].reg == 0
+ || args[i].partial != 0
+ || args[i].pass_on_stack)
+ update_stack_alignment_for_call (&args[i].locate);
if (crtl->preferred_stack_boundary < preferred_stack_boundary)
crtl->preferred_stack_boundary = preferred_stack_boundary;
else
preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
+ if (flag_callgraph_info)
+ record_final_call (fndecl, EXPR_LOCATION (exp));
+
/* We want to make two insn chains; one for a sibling call, the other
for a normal call. We will select one of the two chains after
initial RTL generation is complete. */
for (i = 0; i < num_actuals; i++)
{
- /* Delay bounds until all other args are stored. */
- if (POINTER_BOUNDS_P (args[i].tree_value))
- continue;
- else if (args[i].reg == 0 || args[i].pass_on_stack)
+ if (args[i].reg == 0 || args[i].pass_on_stack)
{
rtx_insn *before_arg = get_last_insn ();
/* Figure out the register where the value, if any, will come back. */
valreg = 0;
- valbnd = 0;
if (TYPE_MODE (rettype) != VOIDmode
&& ! structure_value_addr)
{
if (pcc_struct_value)
- {
- valreg = hard_function_value (build_pointer_type (rettype),
- fndecl, NULL, (pass == 0));
- if (CALL_WITH_BOUNDS_P (exp))
- valbnd = targetm.calls.
- chkp_function_value_bounds (build_pointer_type (rettype),
- fndecl, (pass == 0));
- }
+ valreg = hard_function_value (build_pointer_type (rettype),
+ fndecl, NULL, (pass == 0));
else
- {
- valreg = hard_function_value (rettype, fndecl, fntype,
- (pass == 0));
- if (CALL_WITH_BOUNDS_P (exp))
- valbnd = targetm.calls.chkp_function_value_bounds (rettype,
- fndecl,
- (pass == 0));
- }
+ valreg = hard_function_value (rettype, fndecl, fntype,
+ (pass == 0));
/* If VALREG is a PARALLEL whose first member has a zero
offset, use that. This is for targets such as m68k that
}
}
- /* Store all bounds not passed in registers. */
- for (i = 0; i < num_actuals; i++)
- {
- if (POINTER_BOUNDS_P (args[i].tree_value)
- && !args[i].reg)
- store_bounds (&args[i],
- args[i].pointer_arg == -1
- ? NULL
- : &args[args[i].pointer_arg]);
- }
-
/* If register arguments require space on the stack and stack space
was not preallocated, allocate stack space here for arguments
passed in registers. */
/* Set up next argument register. For sibling calls on machines
with register windows this should be the incoming register. */
if (pass == 0)
- next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
- VOIDmode,
- void_type_node,
- true);
+ next_arg_reg = targetm.calls.function_incoming_arg
+ (args_so_far, function_arg_info::end_marker ());
else
- next_arg_reg = targetm.calls.function_arg (args_so_far,
- VOIDmode, void_type_node,
- true);
+ next_arg_reg = targetm.calls.function_arg
+ (args_so_far, function_arg_info::end_marker ());
if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
{
emit_move_insn (temp, valreg);
- /* The return value from a malloc-like function can not alias
+ /* The return value from a malloc-like function cannot alias
anything else. */
last = get_last_insn ();
add_reg_note (last, REG_NOALIAS, temp);
free (stack_usage_map_buf);
free (args);
-
- /* Join result with returned bounds so caller may use them if needed. */
- target = chkp_join_splitted_slot (target, valbnd);
-
return target;
}
rtx mem_value = 0;
rtx valreg;
int pcc_struct_value = 0;
- int struct_value_size = 0;
+ poly_int64 struct_value_size = 0;
int flags;
int reg_parm_stack_space = 0;
poly_int64 needed;
argvec[count].mode = Pmode;
argvec[count].partial = 0;
- argvec[count].reg = targetm.calls.function_arg (args_so_far,
- Pmode, NULL_TREE, true);
- gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
- NULL_TREE, 1) == 0);
+ function_arg_info ptr_arg (Pmode, /*named=*/true);
+ argvec[count].reg = targetm.calls.function_arg (args_so_far, ptr_arg);
+ gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, ptr_arg) == 0);
locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
|| reg_parm_stack_space > 0)
args_size.constant += argvec[count].locate.size.constant;
- targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
+ targetm.calls.function_arg_advance (args_so_far, ptr_arg);
count++;
}
for (unsigned int i = 0; count < nargs; i++, count++)
{
rtx val = args[i].first;
- machine_mode mode = args[i].second;
+ function_arg_info arg (args[i].second, /*named=*/true);
int unsigned_p = 0;
/* We cannot convert the arg value to the mode the library wants here;
must do it earlier where we know the signedness of the arg. */
- gcc_assert (mode != BLKmode
- && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
+ gcc_assert (arg.mode != BLKmode
+ && (GET_MODE (val) == arg.mode
+ || GET_MODE (val) == VOIDmode));
/* Make sure it is a reasonable operand for a move or push insn. */
if (!REG_P (val) && !MEM_P (val)
- && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
+ && !(CONSTANT_P (val)
+ && targetm.legitimate_constant_p (arg.mode, val)))
val = force_operand (val, NULL_RTX);
- if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
+ if (pass_by_reference (&args_so_far_v, arg))
{
rtx slot;
- int must_copy
- = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
+ int must_copy = !reference_callee_copied (&args_so_far_v, arg);
/* If this was a CONST function, it is now PURE since it now
reads memory. */
}
else
{
- slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
+ slot = assign_temp (lang_hooks.types.type_for_mode (arg.mode, 0),
1, 1);
emit_move_insn (slot, val);
}
slot),
call_fusage);
- mode = Pmode;
+ arg.mode = Pmode;
+ arg.pass_by_reference = true;
val = force_operand (XEXP (slot, 0), NULL_RTX);
}
- mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
- argvec[count].mode = mode;
- argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
- argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
- NULL_TREE, true);
+ arg.mode = promote_function_mode (NULL_TREE, arg.mode, &unsigned_p,
+ NULL_TREE, 0);
+ argvec[count].mode = arg.mode;
+ argvec[count].value = convert_modes (arg.mode, GET_MODE (val), val,
+ unsigned_p);
+ argvec[count].reg = targetm.calls.function_arg (args_so_far, arg);
argvec[count].partial
- = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
+ = targetm.calls.arg_partial_bytes (args_so_far, arg);
if (argvec[count].reg == 0
|| argvec[count].partial != 0
|| reg_parm_stack_space > 0)
{
- locate_and_pad_parm (mode, NULL_TREE,
+ locate_and_pad_parm (arg.mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
1,
#else
/* The argument is passed entirely in registers. See at which
end it should be padded. */
argvec[count].locate.where_pad =
- BLOCK_REG_PADDING (mode, NULL_TREE,
- GET_MODE_SIZE (mode) <= UNITS_PER_WORD);
+ BLOCK_REG_PADDING (arg.mode, NULL_TREE,
+ known_le (GET_MODE_SIZE (arg.mode),
+ UNITS_PER_WORD));
#endif
- targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
+ targetm.calls.function_arg_advance (args_so_far, arg);
}
+ for (int i = 0; i < nargs; i++)
+ if (reg_parm_stack_space > 0
+ || argvec[i].reg == 0
+ || argvec[i].partial != 0)
+ update_stack_alignment_for_call (&argvec[i].locate);
+
/* If this machine requires an external definition for library
functions, write one out. */
assemble_external_libcall (fun);
rtx val = argvec[argnum].value;
rtx reg = argvec[argnum].reg;
int partial = argvec[argnum].partial;
-#ifdef BLOCK_REG_PADDING
- int size = 0;
-#endif
/* Handle calls that pass values in multiple non-contiguous
locations. The PA64 has examples of this for library calls. */
{
emit_move_insn (reg, val);
#ifdef BLOCK_REG_PADDING
- size = GET_MODE_SIZE (argvec[argnum].mode);
+ poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
/* Copied from load_register_parameters. */
/* Handle case where we have a value that needs shifting
up to the msb. eg. a QImode value and we're padding
upward on a BYTES_BIG_ENDIAN machine. */
- if (size < UNITS_PER_WORD
+ if (known_lt (size, UNITS_PER_WORD)
&& (argvec[argnum].locate.where_pad
== (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
{
rtx x;
- int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
+ poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
/* Assigning REG here rather than a temp makes CALL_FUSAGE
report the whole reg as used. Strictly speaking, the
before_call = get_last_insn ();
+ if (flag_callgraph_info)
+ record_final_call (SYMBOL_REF_DECL (orgfun), UNKNOWN_LOCATION);
+
/* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
will set inhibit_defer_pop to that value. */
/* The return type is needed to decide how many bytes the function pops.
original_args_size.constant, args_size.constant,
struct_value_size,
targetm.calls.function_arg (args_so_far,
- VOIDmode, void_type_node, true),
+ function_arg_info::end_marker ()),
valreg,
old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
}
\f
-/* Store pointer bounds argument ARG into Bounds Table entry
- associated with PARM. */
-static void
-store_bounds (struct arg_data *arg, struct arg_data *parm)
-{
- rtx slot = NULL, ptr = NULL, addr = NULL;
-
- /* We may pass bounds not associated with any pointer. */
- if (!parm)
- {
- gcc_assert (arg->special_slot);
- slot = arg->special_slot;
- ptr = const0_rtx;
- }
- /* Find pointer associated with bounds and where it is
- passed. */
- else
- {
- if (!parm->reg)
- {
- gcc_assert (!arg->special_slot);
-
- addr = adjust_address (parm->stack, Pmode, arg->pointer_offset);
- }
- else if (REG_P (parm->reg))
- {
- gcc_assert (arg->special_slot);
- slot = arg->special_slot;
-
- if (MEM_P (parm->value))
- addr = adjust_address (parm->value, Pmode, arg->pointer_offset);
- else if (REG_P (parm->value))
- ptr = gen_rtx_SUBREG (Pmode, parm->value, arg->pointer_offset);
- else
- {
- gcc_assert (!arg->pointer_offset);
- ptr = parm->value;
- }
- }
- else
- {
- gcc_assert (GET_CODE (parm->reg) == PARALLEL);
-
- gcc_assert (arg->special_slot);
- slot = arg->special_slot;
-
- if (parm->parallel_value)
- ptr = chkp_get_value_with_offs (parm->parallel_value,
- GEN_INT (arg->pointer_offset));
- else
- gcc_unreachable ();
- }
- }
-
- /* Expand bounds. */
- if (!arg->value)
- arg->value = expand_normal (arg->tree_value);
-
- targetm.calls.store_bounds_for_arg (ptr, addr, arg->value, slot);
-}
-
/* Store a single argument for a function call
into the register or memory area where it must be passed.
*ARG describes the argument value and where to pass it.
rtx x = arg->value;
poly_int64 i = 0;
- if (XEXP (x, 0) == crtl->args.internal_arg_pointer
- || (GET_CODE (XEXP (x, 0)) == PLUS
- && XEXP (XEXP (x, 0), 0) ==
- crtl->args.internal_arg_pointer
- && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
+ if (strip_offset (XEXP (x, 0), &i)
+ == crtl->args.internal_arg_pointer)
{
- if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
- i = rtx_to_poly_int64 (XEXP (XEXP (x, 0), 1));
-
/* arg.locate doesn't contain the pretend_args_size offset,
it's part of argblock. Ensure we don't count it in I. */
if (STACK_GROWS_DOWNWARD)
return sibcall_failure;
}
-/* Nonzero if we do not know how to pass TYPE solely in registers. */
+/* Nonzero if we do not know how to pass ARG solely in registers. */
bool
-must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
- const_tree type)
+must_pass_in_stack_var_size (const function_arg_info &arg)
{
- if (!type)
+ if (!arg.type)
return false;
/* If the type has variable size... */
- if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
+ if (!poly_int_tree_p (TYPE_SIZE (arg.type)))
return true;
/* If the type is marked as addressable (it is required
to be constructed into the stack)... */
- if (TREE_ADDRESSABLE (type))
+ if (TREE_ADDRESSABLE (arg.type))
return true;
return false;
/* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
bool
-must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
+must_pass_in_stack_var_size_or_pad (const function_arg_info &arg)
{
- if (!type)
+ if (!arg.type)
return false;
/* If the type has variable size... */
- if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
+ if (TREE_CODE (TYPE_SIZE (arg.type)) != INTEGER_CST)
return true;
/* If the type is marked as addressable (it is required
to be constructed into the stack)... */
- if (TREE_ADDRESSABLE (type))
+ if (TREE_ADDRESSABLE (arg.type))
return true;
- if (TYPE_EMPTY_P (type))
+ if (TYPE_EMPTY_P (arg.type))
return false;
/* If the padding and mode of the type is such that a copy into
a register would put it into the wrong part of the register. */
- if (mode == BLKmode
- && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
- && (targetm.calls.function_arg_padding (mode, type)
+ if (arg.mode == BLKmode
+ && int_size_in_bytes (arg.type) % (PARM_BOUNDARY / BITS_PER_UNIT)
+ && (targetm.calls.function_arg_padding (arg.mode, arg.type)
== (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
return true;
return false;
}
+/* Return true if TYPE must be passed on the stack when passed to
+   the "..." arguments of a function.  */
+
+bool
+must_pass_va_arg_in_stack (tree type)
+{
+  /* Variadic arguments are by definition unnamed, hence named=false;
+     defer the actual decision to the target hook.  */
+  function_arg_info arg (type, /*named=*/false);
+  return targetm.calls.must_pass_in_stack (arg);
+}
+
/* Tell the garbage collector about GTY markers in this source file. */
#include "gt-calls.h"