static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static bool check_read_access (tree, tree, tree = NULL_TREE, int = 1);
-static bool compute_objsize (tree, int, access_ref *, ssa_name_limit_t &,
- range_query *);
+static bool compute_objsize_r (tree, int, access_ref *, ssa_name_limit_t &,
+ pointer_query *);
unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
access_ref *pref /* = NULL */,
int ostype /* = 1 */,
ssa_name_limit_t *psnlim /* = NULL */,
- range_query *rvals /* = NULL */) const
+ pointer_query *qry /* = NULL */) const
{
gphi *phi_stmt = this->phi ();
if (!phi_stmt)
/* The conservative result of the PHI reflecting the offset and size
of the largest PHI argument, regardless of whether or not they all
refer to the same object. */
+ pointer_query empty_qry;
+ if (!qry)
+ qry = &empty_qry;
+
access_ref phi_ref;
if (pref)
{
{
access_ref phi_arg_ref;
tree arg = gimple_phi_arg_def (phi_stmt, i);
- if (!compute_objsize (arg, ostype, &phi_arg_ref, *psnlim, rvals)
+ if (!compute_objsize_r (arg, ostype, &phi_arg_ref, *psnlim, qry)
|| phi_arg_ref.sizrng[0] < 0)
/* A PHI with all null pointer arguments. */
return NULL_TREE;
/* Add PREF's offset to that of the argument. */
phi_arg_ref.add_offset (orng[0], orng[1]);
+ if (TREE_CODE (arg) == SSA_NAME)
+ qry->put_ref (arg, phi_arg_ref);
if (all_refs)
all_refs->safe_push (phi_arg_ref);
return false;
--ssa_def_max;
-
return true;
}
BITMAP_FREE (visited);
}
+/* Default ctor.  Initialize the object with pointers to the range_query
+   and cache_type instances to use, or null.  */
+
+pointer_query::pointer_query (range_query *qry /* = NULL */,
+ cache_type *cache /* = NULL */)
+: rvals (qry), var_cache (cache), hits (), misses (),
+ failures (), depth (), max_depth ()
+{
+ /* No op. */
+}
+
+/* Return a pointer to the cached access_ref instance for the SSA_NAME
+ PTR if it's there or null otherwise. */
+
+const access_ref *
+pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
+{
+ if (!var_cache)
+ {
+ ++misses;
+ return NULL;
+ }
+
+ unsigned version = SSA_NAME_VERSION (ptr);
+ unsigned idx = version << 1 | (ostype & 1);
+ if (var_cache->indices.length () <= idx)
+ {
+ ++misses;
+ return NULL;
+ }
+
+ unsigned cache_idx = var_cache->indices[idx];
+ if (!cache_idx || var_cache->access_refs.length () < cache_idx)
+ {
+ ++misses;
+ return NULL;
+ }
+
+ const access_ref &cache_ref = var_cache->access_refs[cache_idx - 1];
+ if (cache_ref.ref)
+ {
+ ++hits;
+ return &cache_ref;
+ }
+
+ ++misses;
+ return NULL;
+}
+
+/* Retrieve the access_ref instance for a variable from the cache if it's
+ there, or compute it and insert it into the cache if the latter is nonnull. */
+
+bool
+pointer_query::get_ref (tree ptr, access_ref *pref, int ostype /* = 1 */)
+{
+ const unsigned version
+ = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0;
+
+ if (var_cache && version)
+ {
+ unsigned idx = version << 1 | (ostype & 1);
+ if (idx < var_cache->indices.length ())
+ {
+ unsigned cache_idx = var_cache->indices[idx] - 1;
+ if (cache_idx < var_cache->access_refs.length ()
+ && var_cache->access_refs[cache_idx].ref)
+ {
+ ++hits;
+ *pref = var_cache->access_refs[cache_idx];
+ return true;
+ }
+ }
+
+ ++misses;
+ }
+
+ if (!compute_objsize (ptr, ostype, pref, this))
+ {
+ ++failures;
+ return false;
+ }
+
+ return true;
+}
+
+/* Add a copy of the access_ref REF for the SSA_NAME PTR to the cache
+ if the cache exists and REF is valid. */
+
+void
+pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
+{
+ /* Only add populated/valid entries. */
+ if (!var_cache || !ref.ref || ref.sizrng[0] < 0)
+ return;
+
+ /* Add REF to the two-level cache. */
+ unsigned version = SSA_NAME_VERSION (ptr);
+ unsigned idx = version << 1 | (ostype & 1);
+
+ /* Grow INDICES if necessary. An index is valid if it's nonzero.
+ Its value minus one is the index into ACCESS_REFS. Not all
+ entries are valid. */
+ if (var_cache->indices.length () <= idx)
+ var_cache->indices.safe_grow_cleared (idx + 1);
+
+ if (!var_cache->indices[idx])
+ var_cache->indices[idx] = var_cache->access_refs.length () + 1;
+
+ /* Grow ACCESS_REF cache if necessary. An entry is valid if its
+ REF member is nonnull. All entries except for the last two
+ are valid. Once nonnull, the REF value must stay unchanged. */
+ unsigned cache_idx = var_cache->indices[idx];
+ if (var_cache->access_refs.length () <= cache_idx)
+ var_cache->access_refs.safe_grow_cleared (cache_idx + 1);
+
+ access_ref &cache_ref = var_cache->access_refs[cache_idx - 1];
+ if (cache_ref.ref)
+ {
+ gcc_checking_assert (cache_ref.ref == ref.ref);
+ return;
+ }
+
+ cache_ref = ref;
+}
+
+/* Flush the cache if it's nonnull. */
+
+void
+pointer_query::flush_cache ()
+{
+ if (!var_cache)
+ return;
+ var_cache->indices.release ();
+ var_cache->access_refs.release ();
+}
+
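
For reference, the two-level lookup that get_ref and put_ref implement above can be modeled in isolation: the first level maps version * 2 + (ostype & 1) to a 1-based position, with zero meaning "not cached", and the second level holds the entries themselves.  The following standalone sketch uses std::vector in place of GCC's vec and a plain cache_entry struct in place of access_ref; the names toy_cache and cache_entry are illustrative only and not part of the patch.

#include <cassert>
#include <vector>

struct cache_entry
{
  const void *ref = nullptr;        /* Nonnull once the slot is populated.  */
  long sizrng[2] = { -1, -1 };
};

struct toy_cache
{
  /* First level: 1-based positions into ENTRIES, keyed by
     version * 2 + (ostype & 1).  Zero means "not cached".  */
  std::vector<unsigned> indices;
  /* Second level: the cached entries themselves.  */
  std::vector<cache_entry> entries;

  void put (unsigned version, int ostype, const cache_entry &e)
  {
    unsigned idx = version * 2 + (ostype & 1);
    if (indices.size () <= idx)
      indices.resize (idx + 1);               /* New slots read as zero.  */
    if (!indices[idx])
      {
        indices[idx] = entries.size () + 1;   /* Record a 1-based position.  */
        entries.push_back (cache_entry ());
      }
    cache_entry &slot = entries[indices[idx] - 1];
    if (!slot.ref)                            /* First store wins, as in put_ref.  */
      slot = e;
  }

  const cache_entry *get (unsigned version, int ostype) const
  {
    unsigned idx = version * 2 + (ostype & 1);
    if (indices.size () <= idx || !indices[idx])
      return nullptr;                         /* Never cached.  */
    const cache_entry &slot = entries[indices[idx] - 1];
    return slot.ref ? &slot : nullptr;
  }
};

int main ()
{
  toy_cache cache;
  cache_entry e;
  int obj = 0;
  e.ref = &obj;
  e.sizrng[0] = e.sizrng[1] = 8;

  cache.put (42, 1, e);                       /* Cache "version 42", ostype 1.  */
  assert (cache.get (42, 1) && cache.get (42, 1)->sizrng[0] == 8);
  assert (!cache.get (42, 0));                /* ostype 0 is a separate slot.  */
  assert (!cache.get (7, 1));                 /* Uncached version: a miss.  */
  return 0;
}
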
/* Return true if NAME starts with __builtin_ or __sync_. */
static bool
static bool
handle_min_max_size (gimple *stmt, int ostype, access_ref *pref,
- ssa_name_limit_t &snlim, range_query *rvals)
+ ssa_name_limit_t &snlim, pointer_query *qry)
{
tree_code code = gimple_assign_rhs_code (stmt);
determined from the other instead, adjusted up or down as appropriate
for the expression. */
access_ref aref[2] = { *pref, *pref };
- if (!compute_objsize (ptr, ostype, &aref[0], snlim, rvals))
+ if (!compute_objsize_r (ptr, ostype, &aref[0], snlim, qry))
{
aref[0].base0 = false;
aref[0].offrng[0] = aref[0].offrng[1] = 0;
}
ptr = gimple_assign_rhs2 (stmt);
- if (!compute_objsize (ptr, ostype, &aref[1], snlim, rvals))
+ if (!compute_objsize_r (ptr, ostype, &aref[1], snlim, qry))
{
aref[1].base0 = false;
aref[1].offrng[0] = aref[1].offrng[1] = 0;
to influence code generation or optimization. */
static bool
-compute_objsize (tree ptr, int ostype, access_ref *pref,
- ssa_name_limit_t &snlim, range_query *rvals)
+compute_objsize_r (tree ptr, int ostype, access_ref *pref,
+ ssa_name_limit_t &snlim, pointer_query *qry)
{
STRIP_NOPS (ptr);
}
const tree_code code = TREE_CODE (ptr);
+ range_query *const rvals = qry ? qry->rvals : NULL;
if (code == BIT_FIELD_REF)
{
tree ref = TREE_OPERAND (ptr, 0);
- if (!compute_objsize (ref, ostype, pref, snlim, rvals))
+ if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
return false;
offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
/* In OSTYPE zero (for raw memory functions like memcpy), use
the maximum size instead if the identity of the enclosing
object cannot be determined. */
- if (!compute_objsize (ref, ostype, pref, snlim, rvals))
+ if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
return false;
/* Otherwise, use the size of the enclosing object and add
return false;
}
- if (!compute_objsize (ref, ostype, pref, snlim, rvals))
+ if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
return false;
offset_int orng[2];
if (code == TARGET_MEM_REF)
{
tree ref = TREE_OPERAND (ptr, 0);
- if (!compute_objsize (ref, ostype, pref, snlim, rvals))
+ if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
return false;
/* TODO: Handle remaining operands. Until then, add maximum offset. */
if (code == POINTER_PLUS_EXPR)
{
tree ref = TREE_OPERAND (ptr, 0);
- if (!compute_objsize (ref, ostype, pref, snlim, rvals))
+ if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
return false;
offset_int orng[2];
if (code == VIEW_CONVERT_EXPR)
{
ptr = TREE_OPERAND (ptr, 0);
- return compute_objsize (ptr, ostype, pref, snlim, rvals);
+ return compute_objsize_r (ptr, ostype, pref, snlim, qry);
}
if (code == SSA_NAME)
/* Only process an SSA_NAME if the recursion limit has not yet
been reached. */
+ if (qry)
+ {
+ if (++qry->depth > qry->max_depth)
+ qry->max_depth = qry->depth;
+ if (const access_ref *cache_ref = qry->get_ref (ptr))
+ {
+ /* If the pointer is in the cache, set *PREF to what it refers
+ to and return success. */
+ *pref = *cache_ref;
+ return true;
+ }
+ }
+
gimple *stmt = SSA_NAME_DEF_STMT (ptr);
if (is_gimple_call (stmt))
{
offset_int offrng[2];
if (tree ret = gimple_call_return_array (stmt, offrng, rvals))
{
- if (!compute_objsize (ret, ostype, pref, snlim, rvals))
+ if (!compute_objsize_r (ret, ostype, pref, snlim, qry))
return false;
/* Cap OFFRNG[1] to at most the remaining size of
pref->ref = ptr;
}
}
+ qry->put_ref (ptr, *pref);
return true;
}
pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
pref->ref = ref;
+ qry->put_ref (ptr, *pref);
return true;
}
pref->set_max_size_range ();
pref->base0 = false;
pref->ref = ptr;
+ qry->put_ref (ptr, *pref);
return true;
}
{
pref->ref = ptr;
access_ref phi_ref = *pref;
- if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, rvals))
+ if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, qry))
return false;
*pref = phi_ref;
pref->ref = ptr;
+ qry->put_ref (ptr, *pref);
return true;
}
tree_code code = gimple_assign_rhs_code (stmt);
if (code == MAX_EXPR || code == MIN_EXPR)
- return handle_min_max_size (stmt, ostype, pref, snlim, rvals);
+ {
+ if (!handle_min_max_size (stmt, ostype, pref, snlim, qry))
+ return false;
+ qry->put_ref (ptr, *pref);
+ return true;
+ }
tree rhs = gimple_assign_rhs1 (stmt);
&& TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
{
/* Compute the size of the object first. */
- if (!compute_objsize (rhs, ostype, pref, snlim, rvals))
+ if (!compute_objsize_r (rhs, ostype, pref, snlim, qry))
return false;
offset_int orng[2];
pref->add_offset (orng[0], orng[1]);
else
pref->add_max_offset ();
+ qry->put_ref (ptr, *pref);
return true;
}
if (code == ADDR_EXPR
|| code == SSA_NAME)
- return compute_objsize (rhs, ostype, pref, snlim, rvals);
+ return compute_objsize_r (rhs, ostype, pref, snlim, qry);
/* (This could also be an assignment from a nonlocal pointer.) Save
PTR to mention in diagnostics but otherwise treat it as a pointer
pref->ref = ptr;
pref->base0 = false;
pref->set_max_size_range ();
+ if (TREE_CODE (ptr) == SSA_NAME)
+ qry->put_ref (ptr, *pref);
return true;
}
compute_objsize (tree ptr, int ostype, access_ref *pref,
range_query *rvals /* = NULL */)
{
+ pointer_query qry;
+ qry.rvals = rvals;
+ ssa_name_limit_t snlim;
+ if (!compute_objsize_r (ptr, ostype, pref, snlim, &qry))
+ return NULL_TREE;
+
+ offset_int maxsize = pref->size_remaining ();
+ if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
+ pref->offrng[0] = 0;
+ return wide_int_to_tree (sizetype, maxsize);
+}
+
+/* Wrapper around compute_objsize_r for callers that supply their own
+ pointer_query. New code should prefer this overload. */
+
+tree
+compute_objsize (tree ptr, int ostype, access_ref *pref, pointer_query *ptr_qry)
+{
+ pointer_query qry;
+ if (ptr_qry)
+ ptr_qry->depth = 0;
+ else
+ ptr_qry = &qry;
+
ssa_name_limit_t snlim;
- if (!compute_objsize (ptr, ostype, pref, snlim, rvals))
+ if (!compute_objsize_r (ptr, ostype, pref, snlim, ptr_qry))
return NULL_TREE;
offset_int maxsize = pref->size_remaining ();
return wide_int_to_tree (sizetype, maxsize);
}
-/* Transitional wrapper around the above. The function should be removed
+/* Legacy wrapper around the above. The function should be removed
once callers transition to one of the two above. */
tree
~ssa_name_limit_t ();
};
-class range_query;
+class pointer_query;
/* Describes a reference to an object used in an access. */
struct access_ref
/* Return the object to which REF refers. */
tree get_ref (vec<access_ref> *, access_ref * = NULL, int = 1,
- ssa_name_limit_t * = NULL, range_query * = NULL) const;
+ ssa_name_limit_t * = NULL, pointer_query * = NULL) const;
/* Return true if OFFRNG is the constant zero. */
bool offset_zero () const
bool parmarray;
};
+class range_query;
+
+/* Queries and caches compute_objsize results. */
+class pointer_query
+{
+ DISABLE_COPY_AND_ASSIGN (pointer_query);
+
+public:
+ /* Type of the two-level cache object defined by clients of the class
+ to cache access_refs for pointer SSA_NAMEs for fast lookup. */
+ struct cache_type
+ {
+ /* 1-based indices into cache. */
+ vec<unsigned> indices;
+ /* The cache itself. */
+ vec<access_ref> access_refs;
+ };
+
+ /* Construct an object with the given Ranger instance and cache. */
+ explicit pointer_query (range_query * = NULL, cache_type * = NULL);
+
+ /* Retrieve the access_ref for a variable from cache if it's there. */
+ const access_ref* get_ref (tree, int = 1) const;
+
+ /* Retrieve the access_ref for a variable from cache or compute it. */
+ bool get_ref (tree, access_ref*, int = 1);
+
+ /* Add an access_ref for the SSA_NAME to the cache. */
+ void put_ref (tree, const access_ref&, int = 1);
+
+ /* Flush the cache. */
+ void flush_cache ();
+
+ /* A Ranger instance. May be null to use global ranges. */
+ range_query *rvals;
+ /* Cache of SSA_NAMEs. May be null to disable caching. */
+ cache_type *var_cache;
+
+ /* Cache performance counters. */
+ mutable unsigned hits;
+ mutable unsigned misses;
+ mutable unsigned failures;
+ mutable unsigned depth;
+ mutable unsigned max_depth;
+};
+
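
As a condensed illustration of the intended usage (mirroring the strlen pass changes later in this patch), a client creates one cache_type and one pointer_query per pass invocation and routes all pointer queries through get_ref so results are shared across statements.  The fragment below is only a sketch: example_pass_body is a hypothetical function, and the code uses GCC-internal types (tree, access_ref, range_query), so it is meaningful only inside the GCC tree.

/* Hypothetical sketch of client usage; not part of the patch.  */

static void
example_pass_body (range_query *ranger, tree ptr)
{
  pointer_query::cache_type var_cache;      /* Lives for the whole pass.  */
  pointer_query ptr_qry (ranger, &var_cache);

  access_ref aref;
  if (ptr_qry.get_ref (ptr, &aref))         /* Cached result or computed anew.  */
    {
      /* Use AREF, e.g. aref.size_remaining () and aref.offrng.  */
    }

  /* The HITS, MISSES, and FAILURES counters can be dumped to gauge
     how effective the cache was.  */

  ptr_qry.flush_cache ();                   /* Release the cache's vectors.  */
}
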
/* Describes a pair of references used in an access by built-in
functions like memcpy. */
struct access_data
extern tree gimple_call_alloc_size (gimple *, wide_int[2] = NULL,
range_query * = NULL);
extern tree gimple_parm_array_size (tree, wide_int[2], bool * = NULL);
+
extern tree compute_objsize (tree, int, access_ref *, range_query * = NULL);
+extern tree compute_objsize (tree, int, access_ref *, pointer_query *);
+/* Legacy/transitional API. Should not be used in new code. */
extern tree compute_objsize (tree, int, tree * = NULL, tree * = NULL,
range_query * = NULL);
extern bool check_access (tree, tree, tree, tree, tree,
available, or the maximum possible size otherwise. */
static unsigned HOST_WIDE_INT
-get_destination_size (tree dest)
+get_destination_size (tree dest, pointer_query &ptr_qry)
{
/* When there is no destination return the maximum. */
if (!dest)
return HOST_WIDE_INT_MAX;
- /* Initialize object size info before trying to compute it. */
- init_object_sizes ();
+ /* Use compute_objsize to determine the size of the destination object. */
+ access_ref aref;
+ if (!ptr_qry.get_ref (dest, &aref))
+ return HOST_WIDE_INT_MAX;
- /* Use __builtin_object_size to determine the size of the destination
- object. When optimizing, determine the smallest object (such as
- a member array as opposed to the whole enclosing object), otherwise
- use type-zero object size to determine the size of the enclosing
- object (the function fails without optimization in this type). */
- int ost = optimize > 0;
- unsigned HOST_WIDE_INT size;
- if (compute_builtin_object_size (dest, ost, &size))
- return size;
+ offset_int remsize = aref.size_remaining ();
+ if (!wi::fits_uhwi_p (remsize))
+ return HOST_WIDE_INT_MAX;
- return HOST_WIDE_INT_MAX;
+ return remsize.to_uhwi ();
}
/* Return true if the call described by INFO with result RES safe to
gsi_next should not be performed in the caller. */
bool
-handle_printf_call (gimple_stmt_iterator *gsi, range_query *query)
+handle_printf_call (gimple_stmt_iterator *gsi, pointer_query &ptr_qry)
{
init_target_to_host_charmap ();
/* For non-bounded functions like sprintf, determine the size
of the destination from the object or pointer passed to it
as the first argument. */
- dstsize = get_destination_size (dstptr);
+ dstsize = get_destination_size (dstptr, ptr_qry);
}
else if (tree size = gimple_call_arg (info.callstmt, idx_dstsize))
{
and use the greater of the two at level 1 and the smaller
of them at level 2. */
value_range vr;
- query->range_of_expr (vr, size, info.callstmt);
+ ptr_qry.rvals->range_of_expr (vr, size, info.callstmt);
if (!vr.undefined_p ())
{
never set to true again). */
res.posunder4k = posunder4k && dstptr;
- bool success = compute_format_length (info, &res, query);
+ bool success = compute_format_length (info, &res, ptr_qry.rvals);
if (res.warned)
gimple_set_no_warning (info.callstmt, true);
--- /dev/null
+/* PR middle-end/97373 - missing warning on sprintf into allocated destination
+ { dg-do compile }
+ { dg-options "-O2 -Wall -ftrack-macro-expansion=0" } */
+
+#include "../range.h"
+
+extern void* alloca (size_t);
+extern void* malloc (size_t);
+
+extern int sprintf (char*, const char*, ...);
+#define sprintf(d, ...) (sprintf (d, __VA_ARGS__), sink (d))
+
+void sink (void*, ...);
+
+void test_alloca_range (void)
+{
+ int n1_2 = UR (1, 2);
+ int n5_9 = UR (5, 9);
+
+ char *d = (char*)alloca (n5_9);
+
+ sprintf (d, "%i", 12345);
+
+ d += n1_2;
+ sprintf (d, "%i", 12345);
+
+ d += n1_2;
+ sprintf (d, "%i", 12345);
+
+ d += n1_2;
+ sprintf (d, "%i", 12345);
+
+ d += n1_2;
+ sprintf (d, "%i", 12345); // { dg-warning "writing a terminating nul past the end of the destination" }
+
+ d += n1_2;
+ sprintf (d, "%i", 12345); // { dg-warning "'%i' directive writing 5 bytes into a region of size 4" }
+}
+
+
+void test_malloc_range (void)
+{
+ int n2_3 = UR (2, 3);
+ int n5_9 = UR (5, 9);
+
+ char *d = (char*)malloc (n5_9);
+
+ sprintf (d, "%i", 12345);
+
+ d += n2_3;
+ sprintf (d, "%i", 12345);
+
+ d += n2_3;
+ sprintf (d, "%i", 12345); // { dg-warning "writing a terminating nul past the end of the destination" }
+
+ d += n2_3;
+ sprintf (d, "%i", 12345); // { dg-warning "'%i' directive writing 5 bytes into a region of size 3" }
+}
+
+
+void test_vla_range (void)
+{
+ int n3_4 = UR (3, 4);
+ int n5_9 = UR (5, 9);
+
+ char vla[n5_9];
+ char *d = vla;
+
+ sprintf (d, "%i", 12345);
+
+ d += n3_4;
+ sprintf (d, "%i", 12345);
+
+ d += n3_4;
+ sprintf (d, "%i", 12345); // { dg-warning "'%i' directive writing 5 bytes into a region of size 3" }
+}
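
The expected warnings above are consistent with comparing the sprintf output, strlen ("12345") plus the terminating nul, i.e. six bytes, against the most space that can remain in the destination: the upper bound of the allocated size minus the lower bound of the accumulated offset (cf. access_ref::size_remaining in the builtins.c changes).  The standalone program below only illustrates that arithmetic for test_alloca_range; it is not the checker's code, and the verdict thresholds are inferred from the dg-warning expectations.

#include <stdio.h>

int main (void)
{
  const int size_max = 9;   /* Upper bound of alloca (UR (5, 9)).  */
  const int off_min = 1;    /* Lower bound of each d += UR (1, 2).  */
  const int need = 6;       /* strlen ("12345") plus the terminating nul.  */

  for (int bumps = 0; bumps <= 5; ++bumps)
    {
      /* The most bytes that can possibly remain past D.  */
      int left = size_max - bumps * off_min;
      const char *verdict
        = left >= need ? "no warning"
        : left == need - 1 ? "terminating nul past the end"
        : "directive writing 5 bytes into a smaller region";
      printf ("sprintf call %d: at most %d byte(s) left, needs %d -> %s\n",
              bumps + 1, left, need, verdict);
    }
  return 0;
}
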
#include "tree-ssa-propagate.h"
#include "tree-ssa-strlen.h"
#include "tree-hash-traits.h"
-#include "tree-object-size.h"
#include "builtins.h"
#include "target.h"
#include "diagnostic-core.h"
strinfo. */
static void
-adjust_last_stmt (strinfo *si, gimple *stmt, bool is_strcat)
+adjust_last_stmt (strinfo *si, gimple *stmt, bool is_strcat,
+ pointer_query &ptr_qry)
{
tree vuse, callee, len;
struct laststmt_struct last = laststmt;
/* Don't fold away an out of bounds access, as this defeats proper
warnings. */
tree dst = gimple_call_arg (last.stmt, 0);
- tree size = compute_objsize (dst, 0);
+
+ access_ref aref;
+ tree size = compute_objsize (dst, 1, &aref, &ptr_qry);
if (size && tree_int_cst_lt (size, len))
return;
}
to allow accesses across subobject boundaries. */
static void
-maybe_warn_overflow (gimple *stmt, tree len,
- range_query *rvals = NULL,
+maybe_warn_overflow (gimple *stmt, tree len, pointer_query &ptr_qry,
strinfo *si = NULL, bool plus_one = false,
bool rawmem = false)
{
if (TREE_NO_WARNING (dest))
return;
+ const int ostype = rawmem ? 0 : 1;
+
/* Use maximum precision to avoid overflow in the addition below.
Make sure all operands have the same precision to keep wide_int
from ICE'ing. */
access_ref aref;
/* The size of the destination region (which is smaller than
the destination object for stores at a non-zero offset). */
- tree destsize = compute_objsize (dest, rawmem ? 0 : 1, &aref, rvals);
+ tree destsize = compute_objsize (dest, ostype, &aref, &ptr_qry);
+
if (!destsize)
{
aref.sizrng[0] = 0;
return;
wide_int rng[2];
- if (!get_range (len, stmt, rng, rvals))
+ if (!get_range (len, stmt, rng, ptr_qry.rvals))
return;
widest_int lenrng[2] =
static inline void
maybe_warn_overflow (gimple *stmt, unsigned HOST_WIDE_INT len,
- range_query *rvals = NULL, strinfo *si = NULL,
+ pointer_query &ptr_qry, strinfo *si = NULL,
bool plus_one = false, bool rawmem = false)
{
- maybe_warn_overflow (stmt, build_int_cst (size_type_node, len), rvals,
+ maybe_warn_overflow (stmt, build_int_cst (size_type_node, len), ptr_qry,
si, plus_one, rawmem);
}
static void
handle_builtin_strcpy (enum built_in_function bcode, gimple_stmt_iterator *gsi,
- range_query *rvals)
+ pointer_query &ptr_qry)
{
int idx, didx;
tree src, dst, srclen, len, lhs, type, fn, oldlen;
return;
if (olddsi != NULL)
- adjust_last_stmt (olddsi, stmt, false);
+ adjust_last_stmt (olddsi, stmt, false, ptr_qry);
srclen = NULL_TREE;
if (si != NULL)
else if (idx < 0)
srclen = build_int_cst (size_type_node, ~idx);
- maybe_warn_overflow (stmt, srclen, rvals, olddsi, true);
+ maybe_warn_overflow (stmt, srclen, ptr_qry, olddsi, true);
if (olddsi != NULL)
- adjust_last_stmt (olddsi, stmt, false);
+ adjust_last_stmt (olddsi, stmt, false, ptr_qry);
loc = gimple_location (stmt);
if (srclen == NULL_TREE)
*/
bool
-maybe_diag_stxncpy_trunc (gimple_stmt_iterator gsi, tree src, tree cnt)
+maybe_diag_stxncpy_trunc (gimple_stmt_iterator gsi, tree src, tree cnt,
+ pointer_query *ptr_qry /* = NULL */)
{
gimple *stmt = gsi_stmt (gsi);
if (gimple_no_warning_p (stmt))
}
}
- if (tree dstsize = compute_objsize (dst, 1))
+ access_ref aref;
+ if (tree dstsize = compute_objsize (dst, 1, &aref, ptr_qry))
{
/* The source length is unknown. Try to determine the destination
size and see if it matches the specified bound. If not, bail.
/* Avoid warning for strncpy(a, b, N) calls where the following
equalities hold:
N == sizeof a && N == sizeof b */
- if (tree srcsize = compute_objsize (src, 1))
+ if (tree srcsize = compute_objsize (src, 1, &aref, ptr_qry))
if (wi::to_wide (srcsize) == cntrange[1])
return false;
static void
handle_builtin_memcpy (enum built_in_function bcode, gimple_stmt_iterator *gsi,
- range_query *rvals)
+ pointer_query &ptr_qry)
{
tree lhs, oldlen, newlen;
gimple *stmt = gsi_stmt (*gsi);
if (olddsi != NULL
&& !integer_zerop (len))
{
- maybe_warn_overflow (stmt, len, rvals, olddsi, false, false);
- adjust_last_stmt (olddsi, stmt, false);
+ maybe_warn_overflow (stmt, len, ptr_qry, olddsi, false, false);
+ adjust_last_stmt (olddsi, stmt, false, ptr_qry);
}
int idx = get_stridx (src);
}
if (olddsi != NULL && TREE_CODE (len) == SSA_NAME)
- adjust_last_stmt (olddsi, stmt, false);
+ adjust_last_stmt (olddsi, stmt, false, ptr_qry);
if (didx == 0)
{
is known. */
static void
-handle_builtin_strcat (enum built_in_function bcode, gimple_stmt_iterator *gsi)
+handle_builtin_strcat (enum built_in_function bcode, gimple_stmt_iterator *gsi,
+ pointer_query &ptr_qry)
{
int idx, didx;
tree srclen, args, type, fn, objsz, endptr;
computed by transforming this strcpy into stpcpy. */
if (srclen == NULL_TREE && dsi->dont_invalidate)
dsi->stmt = stmt;
- adjust_last_stmt (dsi, stmt, true);
+ adjust_last_stmt (dsi, stmt, true, ptr_qry);
if (srclen != NULL_TREE)
{
laststmt.stmt = stmt;
static bool
handle_builtin_memset (gimple_stmt_iterator *gsi, bool *zero_write,
- range_query *rvals)
+ pointer_query &ptr_qry)
{
gimple *memset_stmt = gsi_stmt (*gsi);
tree ptr = gimple_call_arg (memset_stmt, 0);
/* Set to the non-constant offset added to PTR. */
wide_int offrng[2];
- int idx1 = get_stridx (ptr, offrng, rvals);
+ int idx1 = get_stridx (ptr, offrng, ptr_qry.rvals);
if (idx1 <= 0)
return false;
strinfo *si1 = get_strinfo (idx1);
tree memset_size = gimple_call_arg (memset_stmt, 2);
/* Check for overflow. */
- maybe_warn_overflow (memset_stmt, memset_size, rvals, NULL, false, false);
+ maybe_warn_overflow (memset_stmt, memset_size, ptr_qry, NULL, false, false);
/* Bail when there is no statement associated with the destination
(the statement may be null even when SI1->ALLOC is not). */
static bool
handle_store (gimple_stmt_iterator *gsi, bool *zero_write,
- range_query *rvals)
+ pointer_query &ptr_qry)
{
int idx = -1;
strinfo *si = NULL;
tree ssaname = NULL_TREE, lhs = gimple_assign_lhs (stmt);
tree rhs = gimple_assign_rhs1 (stmt);
+ range_query *const rvals = ptr_qry.rvals;
+
/* The offset of the first byte in LHS modified by the store. */
unsigned HOST_WIDE_INT offset = 0;
unsigned lenrange[] = { UINT_MAX, 0, 0 };
if (count_nonzero_bytes (rhs, lenrange, &dummy, &dummy, &dummy,
rvals))
- maybe_warn_overflow (stmt, lenrange[2], rvals);
+ maybe_warn_overflow (stmt, lenrange[2], ptr_qry);
return true;
}
storing_nonzero_p = lenrange[1] > 0;
*zero_write = storing_all_zeros_p;
- maybe_warn_overflow (stmt, lenrange[2], rvals);
+ maybe_warn_overflow (stmt, lenrange[2], ptr_qry);
}
else
{
/* We're overwriting the nul terminator with a nonzero or
unknown character. If the previous stmt was a memcpy,
its length may be decreased. */
- adjust_last_stmt (si, stmt, false);
+ adjust_last_stmt (si, stmt, false, ptr_qry);
si = unshare_strinfo (si);
if (storing_nonzero_p)
{
static bool
strlen_check_and_optimize_call (gimple_stmt_iterator *gsi, bool *zero_write,
- range_query *rvals)
+ pointer_query &ptr_qry)
{
gimple *stmt = gsi_stmt (*gsi);
if (!flag_optimize_strlen
|| !strlen_optimize
|| !valid_builtin_call (stmt))
- return !handle_printf_call (gsi, rvals);
+ return !handle_printf_call (gsi, ptr_qry);
tree callee = gimple_call_fndecl (stmt);
switch (DECL_FUNCTION_CODE (callee))
case BUILT_IN_STRCPY_CHK:
case BUILT_IN_STPCPY:
case BUILT_IN_STPCPY_CHK:
- handle_builtin_strcpy (DECL_FUNCTION_CODE (callee), gsi, rvals);
+ handle_builtin_strcpy (DECL_FUNCTION_CODE (callee), gsi, ptr_qry);
break;
case BUILT_IN_STRNCAT:
case BUILT_IN_MEMCPY_CHK:
case BUILT_IN_MEMPCPY:
case BUILT_IN_MEMPCPY_CHK:
- handle_builtin_memcpy (DECL_FUNCTION_CODE (callee), gsi, rvals);
+ handle_builtin_memcpy (DECL_FUNCTION_CODE (callee), gsi, ptr_qry);
break;
case BUILT_IN_STRCAT:
case BUILT_IN_STRCAT_CHK:
- handle_builtin_strcat (DECL_FUNCTION_CODE (callee), gsi);
+ handle_builtin_strcat (DECL_FUNCTION_CODE (callee), gsi, ptr_qry);
break;
case BUILT_IN_ALLOCA:
case BUILT_IN_ALLOCA_WITH_ALIGN:
handle_alloc_call (DECL_FUNCTION_CODE (callee), gsi);
break;
case BUILT_IN_MEMSET:
- if (handle_builtin_memset (gsi, zero_write, rvals))
+ if (handle_builtin_memset (gsi, zero_write, ptr_qry))
return false;
break;
case BUILT_IN_MEMCMP:
break;
case BUILT_IN_STRCMP:
case BUILT_IN_STRNCMP:
- if (handle_builtin_string_cmp (gsi, rvals))
+ if (handle_builtin_string_cmp (gsi, ptr_qry.rvals))
return false;
break;
default:
- if (handle_printf_call (gsi, rvals))
+ if (handle_printf_call (gsi, ptr_qry))
return false;
break;
}
static bool
check_and_optimize_stmt (gimple_stmt_iterator *gsi, bool *cleanup_eh,
- range_query *rvals)
+ pointer_query &ptr_qry)
{
gimple *stmt = gsi_stmt (*gsi);
if (is_gimple_call (stmt))
{
- if (!strlen_check_and_optimize_call (gsi, &zero_write, rvals))
+ if (!strlen_check_and_optimize_call (gsi, &zero_write, ptr_qry))
return false;
}
else if (!flag_optimize_strlen || !strlen_optimize)
}
else if (TREE_CODE (lhs) == SSA_NAME && INTEGRAL_TYPE_P (lhs_type))
/* Handle assignment to a character. */
- handle_integral_assign (gsi, cleanup_eh, rvals);
+ handle_integral_assign (gsi, cleanup_eh, ptr_qry.rvals);
else if (TREE_CODE (lhs) != SSA_NAME && !TREE_SIDE_EFFECTS (lhs))
{
tree type = TREE_TYPE (lhs);
}
/* Handle a single or multibyte assignment. */
- if (is_char_store && !handle_store (gsi, &zero_write, rvals))
+ if (is_char_store && !handle_store (gsi, &zero_write, ptr_qry))
return false;
}
}
strlen_dom_walker (cdi_direction direction)
: dom_walker (direction),
evrp (false),
+ ptr_qry (&evrp, &var_cache),
+ var_cache (),
m_cleanup_cfg (false)
- {}
+ { }
virtual edge before_dom_children (basic_block);
virtual void after_dom_children (basic_block);
to track strlen results across integer variable assignments. */
evrp_range_analyzer evrp;
+ /* A pointer_query object and its cache used to store information about
+ pointers and their targets. */
+ pointer_query ptr_qry;
+ pointer_query::cache_type var_cache;
+
/* Flag that will trigger TODO_cleanup_cfg to be returned in strlen
execute function. */
bool m_cleanup_cfg;
can be used by printf argument processing. */
evrp.record_ranges_from_stmt (stmt, false);
- if (check_and_optimize_stmt (&gsi, &cleanup_eh, &evrp))
+ /* Reset the search depth performance counter. */
+ ptr_qry.depth = 0;
+
+ if (check_and_optimize_stmt (&gsi, &cleanup_eh, ptr_qry))
gsi_next (&gsi);
}
strlen_dom_walker walker (CDI_DOMINATORS);
walker.walk (ENTRY_BLOCK_PTR_FOR_FN (fun));
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ unsigned nused = 0;
+ unsigned nidxs = walker.ptr_qry.var_cache->indices.length ();
+ for (unsigned i = 0; i != nidxs; ++i)
+ if (walker.ptr_qry.var_cache->indices[i])
+ ++nused;
+
+ fprintf (dump_file, "pointer_query counters\n"
+ " index cache size: %u\n"
+ " utilization: %u%%\n"
+ " access cache size: %u\n"
+ " hits: %u\n"
+ " misses: %u\n"
+ " failures: %u\n"
+ " max_depth: %u\n",
+ nidxs,
+ nidxs ? nused * 100 / nidxs : 0,
+ walker.ptr_qry.var_cache->access_refs.length (),
+ walker.ptr_qry.hits, walker.ptr_qry.misses,
+ walker.ptr_qry.failures, walker.ptr_qry.max_depth);
+ }
+
ssa_ver_to_stridx.release ();
strinfo_pool.release ();
if (decl_to_stridxlist_htab)
loop_optimizer_finalize ();
}
- /* Clean up object size info. */
- fini_object_sizes ();
-
return walker.m_cleanup_cfg ? TODO_cleanup_cfg : 0;
}
#ifndef GCC_TREE_SSA_STRLEN_H
#define GCC_TREE_SSA_STRLEN_H
+class pointer_query;
+
extern bool is_strlen_related_p (tree, tree);
-extern bool maybe_diag_stxncpy_trunc (gimple_stmt_iterator, tree, tree);
+extern bool maybe_diag_stxncpy_trunc (gimple_stmt_iterator, tree, tree,
+ pointer_query * = NULL);
extern tree set_strlen_range (tree, wide_int, wide_int, tree = NULL_TREE);
extern tree get_range (tree, gimple *, wide_int[2],
class range_query *);
/* APIs internal to strlen pass. Defined in gimple-ssa-sprintf.c. */
-extern bool handle_printf_call (gimple_stmt_iterator *, class range_query *);
+extern bool handle_printf_call (gimple_stmt_iterator *, pointer_query &);
#endif // GCC_TREE_SSA_STRLEN_H