+2016-09-01 Martin Sebor <msebor@redhat.com>
+
+ PR tree-optimization/71831
+	* tree-object-size.h (compute_builtin_object_size): Return bool
+	instead of the size and add argument for the size.
+ * tree-object-size.c (compute_object_offset): Update signature.
+ (addr_object_size): Same.
+ (compute_builtin_object_size): Return bool instead of the size
+ and add argument for the size. Handle POINTER_PLUS_EXPR when
+ optimization is disabled.
+ (expr_object_size): Adjust.
+ (plus_stmt_object_size): Adjust.
+ (pass_object_sizes::execute): Adjust.
+ * builtins.c (fold_builtin_object_size): Adjust.
+ * doc/extend.texi (Object Size Checking): Update.
+ * ubsan.c (instrument_object_size): Adjust.
+
2016-09-01 Martin Sebor <msebor@redhat.com>
* genmatch.c (parser::parse_expr): Increase buffer size to guarantee
if (TREE_CODE (ptr) == ADDR_EXPR)
{
- bytes = compute_builtin_object_size (ptr, object_size_type);
+ compute_builtin_object_size (ptr, object_size_type, &bytes);
if (wi::fits_to_tree_p (bytes, size_type_node))
return build_int_cstu (size_type_node, bytes);
}
/* If object size is not known yet, delay folding until
later. Maybe subsequent passes will help determining
it. */
- bytes = compute_builtin_object_size (ptr, object_size_type);
- if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
- && wi::fits_to_tree_p (bytes, size_type_node))
+ if (compute_builtin_object_size (ptr, object_size_type, &bytes)
+ && wi::fits_to_tree_p (bytes, size_type_node))
return build_int_cstu (size_type_node, bytes);
}
@findex __builtin___fprintf_chk
@findex __builtin___vfprintf_chk
-GCC implements a limited buffer overflow protection mechanism
-that can prevent some buffer overflow attacks.
+GCC implements a limited buffer overflow protection mechanism that can
+prevent some buffer overflow attacks by determining the sizes of objects
+into which data is about to be written and preventing the writes when
+the size isn't sufficient. The built-in functions described below yield
+the best results when used together and when optimization is enabled.
+For example, to detect object sizes across function boundaries or to
+follow pointer assignments through non-trivial control flow, they rely
+on various optimization passes enabled with @option{-O2}. However, to
+a limited extent, they can be used without optimization as well.
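+
+As a brief illustration (the exact result depends on the expression and
+the optimization level), even at @option{-O0} the built-in can determine
+the space remaining in a simple array:
+
+@smallexample
+char a[9];
+
+/* Seven bytes remain between &a[2] and the end of a.  */
+size_t n = __builtin_object_size (a + 2, 0);
+@end smallexample
+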
@deftypefn {Built-in Function} {size_t} __builtin_object_size (void * @var{ptr}, int @var{type})
is a built-in construct that returns a constant number of bytes from
+2016-09-01 Martin Sebor <msebor@redhat.com>
+
+ PR tree-optimization/71831
+ * gcc.dg/builtin-object-size-16.c: New test.
+ * gcc.dg/builtin-object-size-17.c: New test.
+
2016-09-01 Jerry DeLisle <jvdelisle@gcc.gnu.org>
PR libgfortran/77393
--- /dev/null
+/* PR 71831 - __builtin_object_size poor results with no optimization
+ Verify that even without optimization __builtin_object_size returns
+ a meaningful result for a subset of simple expressions. In cases
+ where the result could not easily be made to match the one obtained
+ with optimization, the built-in was made to fail instead. */
+/* { dg-do run } */
+/* { dg-options "-O0" } */
+
+static int nfails;
+
+#define TEST_FAILURE(line, obj, type, expect, result) \
+ __builtin_printf ("FAIL: line %i: __builtin_object_size(" \
+ #obj ", %i) == %zu, got %zu\n", \
+ line, type, expect, result), ++nfails
+
+#define bos(obj, type) __builtin_object_size (obj, type)
+#define size(obj, n) ((size_t)n == X ? sizeof *obj : (size_t)n)
+
+#define test(expect, type, obj) \
+ do { \
+ if (bos (obj, type) != size (obj, expect)) \
+ TEST_FAILURE (__LINE__, obj, type, size (obj, expect), bos (obj, type)); \
+ } while (0)
+
+#define T(r0, r1, r2, r3, obj) \
+ do { \
+ test (r0, 0, obj); \
+ test (r1, 1, obj); \
+ test (r2, 2, obj); \
+ test (r3, 3, obj); \
+ } while (0)
+
+/* For convenience. Substitute for 'sizeof object' in test cases where
+ the size can vary from target to target. */
+#define X (size_t)0xdeadbeef
+
+/* __builtin_object_size checking results are inconsistent for equivalent
+ expressions (see bug 71831). To avoid having to duplicate the inconsistency
+ at -O0, the built-in simply fails. The results hardcoded in this test
+ are those obtained with optimization (for easy comparison) but without
+ optimization the macros below turn them into expected failures. */
+#if __OPTIMIZE__
+# define F0(n) n
+# define F1(n) n
+# define F2(n) n
+# define F3(n) n
+#else
+# define F0(n) -1
+# define F1(n) -1
+# define F2(n) 0
+# define F3(n) 0
+#endif
+
+typedef __SIZE_TYPE__ size_t;
+
+extern char ax[];
+char ax2[]; /* { dg-warning "assumed to have one element" } */
+
+extern char a0[0];
+static char a1[1];
+static char a2[2];
+static char a9[9];
+
+#if __SIZEOF_SHORT__ == 4
+extern short ia0[0];
+static short ia1[1];
+static short ia9[9];
+#elif __SIZEOF_INT__ == 4
+extern int ia0[0];
+static int ia1[1];
+static int ia9[9];
+#endif
+
+static char a2x2[2][2];
+static char a3x5[3][5];
+
+struct Sx { char n, a[]; } sx;
+struct S0 { char n, a[0]; } s0;
+struct S1 { char n, a[1]; } s1;
+struct S2 { char n, a[2]; } s2;
+struct S9 { char n, a[9]; } s9;
+
+struct S2x2 { char n, a[2][2]; } s2x2;
+struct S3x5 { char n, a[3][5]; } s3x5;
+
+static __attribute__ ((noclone, noinline)) void
+test_arrays ()
+{
+ T ( -1, -1, 0, 0, ax);
+
+ T ( 0, 0, 0, 0, a0);
+ T ( 1, 1, 1, 1, ax2);
+
+ T ( 1, 1, 1, 1, a1);
+ T ( 2, 2, 2, 2, a2);
+ T ( 9, 9, 9, 9, a9);
+
+ T ( 0, 0, 0, 0, a0);
+ T ( 1, 1, 1, 1, ax2);
+
+ T ( 0, 0, 0, 0, ia0);
+ T ( 4, 4, 4, 4, ia1);
+ T ( 36, 36, 36, 36, ia9);
+
+ /* Not all results for multidimensional arrays make sense (see
+ bug 77293). The expected results below simply reflect those
+ obtained at -O2 (modulo the known limitations at -O1). */
+ T ( 4, 4, 4, 4, a2x2);
+ T ( 4, 4, 4, 4, &a2x2[0]);
+ T ( 4, 2, 4, 2, &a2x2[0][0]);
+ T ( 0, F1 (0), 0, 0, &a2x2 + 1);
+ T ( 2, F1 ( 2), 2, F3 ( 2), &a2x2[0] + 1);
+ T ( 3, F1 ( 1), 3, F3 ( 3), &a2x2[0][0] + 1);
+
+ T ( 15, 15, 15, 15, a3x5);
+ T ( 15, 5, 15, 5, &a3x5[0][0] + 0);
+ T ( 14, F1 ( 4), 14, F3 (14), &a3x5[0][0] + 1);
+
+ T ( 1, 1, 1, 1, a1 + 0);
+ T ( 0, F1 (0), 0, 0, a1 + 1);
+ T ( 0, F1 ( 0), 0, 0, &a1 + 1);
+ /* In the following, the offset is out of bounds, which makes
+ the expression undefined. Still, verify that the returned
+ size is zero (and not some large number). */
+ T ( 0, F1 (0), 0, 0, a1 + 2);
+
+ T ( 2, 2, 2, 2, a2 + 0);
+ T ( 1, F1 ( 1), 1, F3 ( 1), a2 + 1);
+ T ( 0, F1 ( 0), 0, 0, a2 + 2);
+}
+
+static __attribute__ ((noclone, noinline)) void
+test_structs (struct Sx *psx, struct S0 *ps0, struct S1 *ps1, struct S9 *ps9)
+{
+ /* The expected size of a declared object with a flexible array member
+ is sizeof sx in all __builtin_object_size types. */
+ T ( X, X, X, X, &sx);
+
+ /* The expected size of an unknown object with a flexible array member
+ is unknown in all __builtin_object_size types. */
+ T ( -1, -1, 0, 0, psx);
+
+ /* The expected size of a flexible array member of a declared object
+ is zero. */
+ T ( 0, 0, 0, 0, sx.a);
+
+ /* The expected size of a flexible array member of an unknown object
+ is unknown. */
+ T ( -1, -1, 0, 0, psx->a);
+
+ /* The expected size of a declared object with a zero-length array member
+ is sizeof sx in all __builtin_object_size types. */
+ T ( X, X, X, X, &s0);
+
+ /* The expected size of an unknown object with a zero-length array member
+ is unknown in all __builtin_object_size types. */
+ T ( -1, -1, 0, 0, ps0);
+
+ /* The expected size of a zero-length array member of a declared object
+ is zero. */
+ T ( 0, 0, 0, 0, s0.a);
+
+ /* The expected size of a zero-length array member of an unknown object
+ is unknown. */
+ T ( -1, -1, 0, 0, ps0->a);
+
+ T ( X, X, X, X, &s1);
+ T ( 1, 1, 1, 1, s1.a);
+ T ( 0, F1 (0), 0, 0, s1.a + 1);
+
+ /* GCC treats arrays of all sizes that are the last member of a struct
+ as flexible array members. */
+ T ( -1, -1, 0, 0, ps1);
+ T ( -1, -1, 0, 0, ps1->a);
+ T ( -1, -1, 0, 0, ps1->a + 1);
+
+ T ( X, X, X, X, &s9);
+ T ( 9, 9, 9, 9, s9.a);
+ T ( 9, 9, 9, 9, s9.a + 0);
+ T ( 8, F1 ( 8), 8, F3 ( 8), s9.a + 1);
+ T ( 7, F1 ( 7), 7, F3 ( 7), s9.a + 2);
+ T ( 0, F1 ( 0), 0, F3 ( 0), s9.a + 9);
+
+ /* The following results make little sense, but see bug 77301. */
+ T ( -1, -1, 0, 0, ps9);
+ T ( -1, -1, 0, 0, ps9->a);
+ T ( -1, -1, 0, 0, ps9->a + 1);
+}
+
+int
+main()
+{
+ test_arrays ();
+
+ test_structs (&sx, &s0, &s1, &s9);
+
+ if (nfails)
+ __builtin_abort ();
+
+ return 0;
+}
--- /dev/null
+/* PR 71831 - __builtin_object_size poor results with no optimization
+ Verify that even without optimization the __builtin_object_size result
+ is folded into a constant and dead code that depends on it is
+ eliminated. */
+/* { dg-do compile } */
+/* { dg-options "-O0 -fdump-tree-ssa" } */
+
+#define concat(a, b) a ## b
+#define CAT(a, b) concat (a, b)
+
+/* Create a symbol name unique to each test and object size type. */
+#define SYM(type) CAT (CAT (CAT (failure_on_line_, __LINE__), _type_), type)
+
+/* References to the following undefined symbol, which is unique for each
+ test case, are expected to be eliminated. */
+#define TEST_FAILURE(type) \
+ do { \
+ extern void SYM (type)(void); \
+ SYM (type)(); \
+ } while (0)
+
+#define bos(obj, type) __builtin_object_size (obj, type)
+#define size(obj, n) ((size_t)n == X ? sizeof *obj : (size_t)n)
+
+#define test(expect, type, obj) \
+ do { \
+ if (bos (obj, type) != size (obj, expect)) \
+ TEST_FAILURE (type); \
+ } while (0)
+
+#define FOLD_ALL(r0, r1, r2, r3, obj) \
+ do { \
+ test (r0, 0, obj); \
+ test (r1, 1, obj); \
+ test (r2, 2, obj); \
+ test (r3, 3, obj); \
+ } while (0)
+
+#define FOLD_0_2(r0, r1, r2, r3, obj) \
+ do { \
+ test (r0, 0, obj); \
+ test (r2, 2, obj); \
+ } while (0)
+
+/* For convenience. Substitute for 'sizeof object' in test cases where
+ the size can vary from target to target. */
+#define X (size_t)0xdeadbeef
+
+typedef __SIZE_TYPE__ size_t;
+
+extern char ax[];
+char ax2[]; /* { dg-warning "assumed to have one element" } */
+
+extern char a0[0];
+static char a1[1];
+static char a2[2];
+static char a9[9];
+
+#if __SIZEOF_SHORT__ == 4
+extern short ia0[0];
+static short ia1[1];
+static short ia9[9];
+#elif __SIZEOF_INT__ == 4
+extern int ia0[0];
+static int ia1[1];
+static int ia9[9];
+#endif
+
+static char a2x2[2][2];
+static char a3x5[3][5];
+
+struct Sx { char n, a[]; } sx;
+struct S0 { char n, a[0]; } s0;
+struct S1 { char n, a[1]; } s1;
+struct S2 { char n, a[2]; } s2;
+struct S9 { char n, a[9]; } s9;
+
+struct S2x2 { char n, a[2][2]; } s2x2;
+struct S3x5 { char n, a[3][5]; } s3x5;
+
+static __attribute__ ((noclone, noinline)) void
+test_arrays ()
+{
+ FOLD_ALL ( 1, 1, 1, 1, ax2);
+
+ FOLD_ALL ( 1, 1, 1, 1, a1);
+ FOLD_ALL ( 2, 2, 2, 2, a2);
+ FOLD_ALL ( 9, 9, 9, 9, a9);
+
+ FOLD_ALL ( 0, 0, 0, 0, a0);
+ FOLD_ALL ( 1, 1, 1, 1, ax2);
+
+ FOLD_ALL ( 0, 0, 0, 0, ia0);
+ FOLD_ALL ( 4, 4, 4, 4, ia1);
+ FOLD_ALL ( 36, 36, 36, 36, ia9);
+
+ /* Not all results for multidimensional arrays make sense (see
+ bug 77293). The expected results below simply reflect those
+ obtained at -O2 (modulo the known limitations at -O1). */
+ FOLD_ALL ( 4, 4, 4, 4, a2x2);
+ FOLD_ALL ( 4, 4, 4, 4, &a2x2[0]);
+ FOLD_ALL ( 4, 2, 4, 2, &a2x2[0][0]);
+ FOLD_0_2 ( 0, F1 (0), 0, 0, &a2x2 + 1);
+ FOLD_0_2 ( 2, F1 ( 2), 2, F3 ( 2), &a2x2[0] + 1);
+ FOLD_0_2 ( 3, F1 ( 1), 3, F3 ( 3), &a2x2[0][0] + 1);
+
+ FOLD_ALL ( 15, 15, 15, 15, a3x5);
+ FOLD_ALL ( 15, 5, 15, 5, &a3x5[0][0] + 0);
+ FOLD_0_2 ( 14, F1 ( 4), 14, F3 (14), &a3x5[0][0] + 1);
+
+ FOLD_ALL ( 1, 1, 1, 1, a1 + 0);
+ FOLD_0_2 ( 0, F1 ( 0), 0, 0, &a1 + 1);
+ FOLD_ALL ( 2, 2, 2, 2, a2 + 0);
+ FOLD_0_2 ( 1, F1 ( 1), 1, F3 ( 1), a2 + 1);
+ FOLD_0_2 ( 0, F1 ( 0), 0, 0, a2 + 2);
+}
+
+static __attribute__ ((noclone, noinline)) void
+test_structs (void)
+{
+ /* The expected size of a declared object with a flexible array member
+ is sizeof sx in all __builtin_object_size types. */
+ FOLD_ALL ( X, X, X, X, &sx);
+
+ /* The expected size of a flexible array member of a declared object
+ is zero. */
+ FOLD_ALL ( 0, 0, 0, 0, sx.a);
+
+ /* The expected size of a declared object with a zero-length array member
+ is sizeof sx in all __builtin_object_size types. */
+ FOLD_ALL ( X, X, X, X, &s0);
+
+ /* The expected size of a zero-length array member of a declared object
+ is zero. */
+ FOLD_ALL ( 0, 0, 0, 0, s0.a);
+
+ FOLD_ALL ( X, X, X, X, &s1);
+ FOLD_ALL ( 1, 1, 1, 1, s1.a);
+ FOLD_0_2 ( 0, F1 (0), 0, 0, s1.a + 1);
+
+ FOLD_ALL ( X, X, X, X, &s9);
+ FOLD_ALL ( 9, 9, 9, 9, s9.a);
+ FOLD_ALL ( 9, 9, 9, 9, s9.a + 0);
+ FOLD_0_2 ( 8, F1 ( 8), 8, F3 ( 8), s9.a + 1);
+ FOLD_0_2 ( 7, F1 ( 7), 7, F3 ( 7), s9.a + 2);
+ FOLD_0_2 ( 0, F1 ( 0), 0, F3 ( 0), s9.a + 9);
+}
+
+int
+main()
+{
+ test_arrays ();
+ test_structs ();
+
+ return 0;
+}
+
+/* { dg-final { scan-tree-dump-not "failure_on_line" "ssa" } } */
};
static tree compute_object_offset (const_tree, const_tree);
-static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
- const_tree, int);
+static bool addr_object_size (struct object_size_info *,
+ const_tree, int, unsigned HOST_WIDE_INT *);
static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
If unknown, return unknown[object_size_type]. */
-static unsigned HOST_WIDE_INT
+static bool
addr_object_size (struct object_size_info *osi, const_tree ptr,
- int object_size_type)
+ int object_size_type, unsigned HOST_WIDE_INT *psize)
{
tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;
gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
+ /* Set to unknown and overwrite just before returning if the size
+ could be determined. */
+ *psize = unknown[object_size_type];
+
pt_var = TREE_OPERAND (ptr, 0);
while (handled_component_p (pt_var))
pt_var = TREE_OPERAND (pt_var, 0);
if (!osi || (object_size_type & 1) != 0
|| TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
{
- sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
- object_size_type & ~1);
+ compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
+ object_size_type & ~1, &sz);
}
else
{
< offset_limit)
pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
else
- return unknown[object_size_type];
+ return false;
if (pt_var != TREE_OPERAND (ptr, 0))
{
if (var != pt_var)
var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
else if (!pt_var_size)
- return unknown[object_size_type];
+ return false;
else
var_size = pt_var_size;
bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
}
}
else if (!pt_var_size)
- return unknown[object_size_type];
+ return false;
else
bytes = pt_var_size;
if (tree_fits_uhwi_p (bytes))
- return tree_to_uhwi (bytes);
+ {
+ *psize = tree_to_uhwi (bytes);
+ return true;
+ }
- return unknown[object_size_type];
+ return false;
}
}
-/* Compute __builtin_object_size value for PTR. OBJECT_SIZE_TYPE is the
- second argument from __builtin_object_size. */
+/* Compute __builtin_object_size value for PTR and set *PSIZE to
+ the resulting value. OBJECT_SIZE_TYPE is the second argument
+ to __builtin_object_size. Return true on success and false
+ when the object size could not be determined. */
-unsigned HOST_WIDE_INT
-compute_builtin_object_size (tree ptr, int object_size_type)
+bool
+compute_builtin_object_size (tree ptr, int object_size_type,
+ unsigned HOST_WIDE_INT *psize)
{
gcc_assert (object_size_type >= 0 && object_size_type <= 3);
+ /* Set to unknown and overwrite just before returning if the size
+ could be determined. */
+ *psize = unknown[object_size_type];
+
if (! offset_limit)
init_offset_limit ();
if (TREE_CODE (ptr) == ADDR_EXPR)
- return addr_object_size (NULL, ptr, object_size_type);
+ return addr_object_size (NULL, ptr, object_size_type, psize);
+
+ if (TREE_CODE (ptr) != SSA_NAME
+ || !POINTER_TYPE_P (TREE_TYPE (ptr)))
+ return false;
- if (TREE_CODE (ptr) == SSA_NAME
- && POINTER_TYPE_P (TREE_TYPE (ptr))
- && computed[object_size_type] != NULL)
+ if (computed[object_size_type] == NULL)
{
- if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
+ if (optimize || object_size_type & 1)
+ return false;
+
+ /* When not optimizing, rather than failing, make a small effort
+ to determine the object size without the full benefit of
+ the (costly) computation below. */
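+ /* E.g. at -O0 the argument of __builtin_object_size (a + 1, 0) is
+ typically an SSA_NAME defined by a POINTER_PLUS_EXPR with a constant
+ offset; in that case determine the size of the base pointer below
+ and subtract the offset from it. */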
+ gimple *def = SSA_NAME_DEF_STMT (ptr);
+ if (gimple_code (def) == GIMPLE_ASSIGN)
{
- struct object_size_info osi;
- bitmap_iterator bi;
- unsigned int i;
-
- if (num_ssa_names > object_sizes[object_size_type].length ())
- object_sizes[object_size_type].safe_grow (num_ssa_names);
- if (dump_file)
+ tree_code code = gimple_assign_rhs_code (def);
+ if (code == POINTER_PLUS_EXPR)
{
- fprintf (dump_file, "Computing %s %sobject size for ",
- (object_size_type & 2) ? "minimum" : "maximum",
- (object_size_type & 1) ? "sub" : "");
- print_generic_expr (dump_file, ptr, dump_flags);
- fprintf (dump_file, ":\n");
- }
+ tree offset = gimple_assign_rhs2 (def);
+ ptr = gimple_assign_rhs1 (def);
- osi.visited = BITMAP_ALLOC (NULL);
- osi.reexamine = BITMAP_ALLOC (NULL);
- osi.object_size_type = object_size_type;
- osi.depths = NULL;
- osi.stack = NULL;
- osi.tos = NULL;
-
- /* First pass: walk UD chains, compute object sizes that
- can be computed. osi.reexamine bitmap at the end will
- contain what variables were found in dependency cycles
- and therefore need to be reexamined. */
- osi.pass = 0;
- osi.changed = false;
- collect_object_sizes_for (&osi, ptr);
-
- /* Second pass: keep recomputing object sizes of variables
- that need reexamination, until no object sizes are
- increased or all object sizes are computed. */
- if (! bitmap_empty_p (osi.reexamine))
- {
- bitmap reexamine = BITMAP_ALLOC (NULL);
-
- /* If looking for minimum instead of maximum object size,
- detect cases where a pointer is increased in a loop.
- Although even without this detection pass 2 would eventually
- terminate, it could take a long time. If a pointer is
- increasing this way, we need to assume 0 object size.
- E.g. p = &buf[0]; while (cond) p = p + 4; */
- if (object_size_type & 2)
+ if (cst_and_fits_in_hwi (offset)
+ && compute_builtin_object_size (ptr, object_size_type, psize))
{
- osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
- osi.stack = XNEWVEC (unsigned int, num_ssa_names);
- osi.tos = osi.stack;
- osi.pass = 1;
- /* collect_object_sizes_for is changing
- osi.reexamine bitmap, so iterate over a copy. */
- bitmap_copy (reexamine, osi.reexamine);
- EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
- if (bitmap_bit_p (osi.reexamine, i))
- check_for_plus_in_loops (&osi, ssa_name (i));
-
- free (osi.depths);
- osi.depths = NULL;
- free (osi.stack);
- osi.stack = NULL;
- osi.tos = NULL;
+ /* Return zero when the offset is out of bounds. */
+ unsigned HOST_WIDE_INT off = tree_to_shwi (offset);
+ *psize = off < *psize ? *psize - off : 0;
+ return true;
}
+ }
+ }
+ return false;
+ }
- do
- {
- osi.pass = 2;
- osi.changed = false;
- /* collect_object_sizes_for is changing
- osi.reexamine bitmap, so iterate over a copy. */
- bitmap_copy (reexamine, osi.reexamine);
- EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
- if (bitmap_bit_p (osi.reexamine, i))
- {
- collect_object_sizes_for (&osi, ssa_name (i));
- if (dump_file && (dump_flags & TDF_DETAILS))
- {
- fprintf (dump_file, "Reexamining ");
- print_generic_expr (dump_file, ssa_name (i),
- dump_flags);
- fprintf (dump_file, "\n");
- }
- }
- }
- while (osi.changed);
+ if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
+ {
+ struct object_size_info osi;
+ bitmap_iterator bi;
+ unsigned int i;
+
+ if (num_ssa_names > object_sizes[object_size_type].length ())
+ object_sizes[object_size_type].safe_grow (num_ssa_names);
+ if (dump_file)
+ {
+ fprintf (dump_file, "Computing %s %sobject size for ",
+ (object_size_type & 2) ? "minimum" : "maximum",
+ (object_size_type & 1) ? "sub" : "");
+ print_generic_expr (dump_file, ptr, dump_flags);
+ fprintf (dump_file, ":\n");
+ }
- BITMAP_FREE (reexamine);
+ osi.visited = BITMAP_ALLOC (NULL);
+ osi.reexamine = BITMAP_ALLOC (NULL);
+ osi.object_size_type = object_size_type;
+ osi.depths = NULL;
+ osi.stack = NULL;
+ osi.tos = NULL;
+
+ /* First pass: walk UD chains, compute object sizes that
+ can be computed. osi.reexamine bitmap at the end will
+ contain what variables were found in dependency cycles
+ and therefore need to be reexamined. */
+ osi.pass = 0;
+ osi.changed = false;
+ collect_object_sizes_for (&osi, ptr);
+
+ /* Second pass: keep recomputing object sizes of variables
+ that need reexamination, until no object sizes are
+ increased or all object sizes are computed. */
+ if (! bitmap_empty_p (osi.reexamine))
+ {
+ bitmap reexamine = BITMAP_ALLOC (NULL);
+
+ /* If looking for minimum instead of maximum object size,
+ detect cases where a pointer is increased in a loop.
+ Although even without this detection pass 2 would eventually
+ terminate, it could take a long time. If a pointer is
+ increasing this way, we need to assume 0 object size.
+ E.g. p = &buf[0]; while (cond) p = p + 4; */
+ if (object_size_type & 2)
+ {
+ osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
+ osi.stack = XNEWVEC (unsigned int, num_ssa_names);
+ osi.tos = osi.stack;
+ osi.pass = 1;
+ /* collect_object_sizes_for is changing
+ osi.reexamine bitmap, so iterate over a copy. */
+ bitmap_copy (reexamine, osi.reexamine);
+ EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
+ if (bitmap_bit_p (osi.reexamine, i))
+ check_for_plus_in_loops (&osi, ssa_name (i));
+
+ free (osi.depths);
+ osi.depths = NULL;
+ free (osi.stack);
+ osi.stack = NULL;
+ osi.tos = NULL;
}
- EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
- bitmap_set_bit (computed[object_size_type], i);
- /* Debugging dumps. */
- if (dump_file)
+ do
{
- EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
- if (object_sizes[object_size_type][i]
- != unknown[object_size_type])
+ osi.pass = 2;
+ osi.changed = false;
+ /* collect_object_sizes_for is changing
+ osi.reexamine bitmap, so iterate over a copy. */
+ bitmap_copy (reexamine, osi.reexamine);
+ EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
+ if (bitmap_bit_p (osi.reexamine, i))
{
- print_generic_expr (dump_file, ssa_name (i),
- dump_flags);
- fprintf (dump_file,
- ": %s %sobject size "
- HOST_WIDE_INT_PRINT_UNSIGNED "\n",
- (object_size_type & 2) ? "minimum" : "maximum",
- (object_size_type & 1) ? "sub" : "",
- object_sizes[object_size_type][i]);
+ collect_object_sizes_for (&osi, ssa_name (i));
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file, "Reexamining ");
+ print_generic_expr (dump_file, ssa_name (i),
+ dump_flags);
+ fprintf (dump_file, "\n");
+ }
}
}
+ while (osi.changed);
- BITMAP_FREE (osi.reexamine);
- BITMAP_FREE (osi.visited);
+ BITMAP_FREE (reexamine);
}
+ EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
+ bitmap_set_bit (computed[object_size_type], i);
- return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
+ /* Debugging dumps. */
+ if (dump_file)
+ {
+ EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
+ if (object_sizes[object_size_type][i]
+ != unknown[object_size_type])
+ {
+ print_generic_expr (dump_file, ssa_name (i),
+ dump_flags);
+ fprintf (dump_file,
+ ": %s %sobject size "
+ HOST_WIDE_INT_PRINT_UNSIGNED "\n",
+ (object_size_type & 2) ? "minimum" : "maximum",
+ (object_size_type & 1) ? "sub" : "",
+ object_sizes[object_size_type][i]);
+ }
+ }
+
+ BITMAP_FREE (osi.reexamine);
+ BITMAP_FREE (osi.visited);
}
- return unknown[object_size_type];
+ *psize = object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
+ return *psize != unknown[object_size_type];
}
/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME. */
|| !POINTER_TYPE_P (TREE_TYPE (value)));
if (TREE_CODE (value) == ADDR_EXPR)
- bytes = addr_object_size (osi, value, object_size_type);
+ addr_object_size (osi, value, object_size_type, &bytes);
else
bytes = unknown[object_size_type];
unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);
/* op0 will be ADDR_EXPR here. */
- bytes = addr_object_size (osi, op0, object_size_type);
+ addr_object_size (osi, op0, object_size_type, &bytes);
if (bytes == unknown[object_size_type])
;
else if (off > offset_limit)
&& lhs)
{
tree type = TREE_TYPE (lhs);
- unsigned HOST_WIDE_INT bytes
- = compute_builtin_object_size (ptr, object_size_type);
- if (bytes != (unsigned HOST_WIDE_INT) (object_size_type == 1
- ? -1 : 0)
+ unsigned HOST_WIDE_INT bytes;
+ if (compute_builtin_object_size (ptr, object_size_type,
+ &bytes)
&& wi::fits_to_tree_p (bytes, type))
{
tree tem = make_ssa_name (type);
#define GCC_TREE_OBJECT_SIZE_H
extern void init_object_sizes (void);
-extern unsigned HOST_WIDE_INT compute_builtin_object_size (tree, int);
+extern bool compute_builtin_object_size (tree, int, unsigned HOST_WIDE_INT *);
#endif // GCC_TREE_OBJECT_SIZE_H
if (decl_p)
base_addr = build1 (ADDR_EXPR,
build_pointer_type (TREE_TYPE (base)), base);
- unsigned HOST_WIDE_INT size = compute_builtin_object_size (base_addr, 0);
- if (size != HOST_WIDE_INT_M1U)
+ unsigned HOST_WIDE_INT size;
+ if (compute_builtin_object_size (base_addr, 0, &size))
sizet = build_int_cst (sizetype, size);
else if (optimize)
{