+2017-07-28 Jakub Jelinek <jakub@redhat.com>
+
+ PR sanitizer/80998
+ * sanopt.c (pass_sanopt::execute): Handle IFN_UBSAN_PTR.
+ * tree-ssa-alias.c (call_may_clobber_ref_p_1): Likewise.
+	* flag-types.h (enum sanitize_code): Add SANITIZE_POINTER_OVERFLOW.
+ Or it into SANITIZER_UNDEFINED.
+ * ubsan.c: Include gimple-fold.h and varasm.h.
+ (ubsan_expand_ptr_ifn): New function.
+ (instrument_pointer_overflow): New function.
+ (maybe_instrument_pointer_overflow): New function.
+ (instrument_object_size): Formatting fix.
+ (pass_ubsan::execute): Call instrument_pointer_overflow
+ and maybe_instrument_pointer_overflow.
+ * internal-fn.c (expand_UBSAN_PTR): New function.
+ * ubsan.h (ubsan_expand_ptr_ifn): Declare.
+ * sanitizer.def (__ubsan_handle_pointer_overflow,
+ __ubsan_handle_pointer_overflow_abort): New builtins.
+ * tree-ssa-tail-merge.c (merge_stmts_p): Handle IFN_UBSAN_PTR.
+ * internal-fn.def (UBSAN_PTR): New internal function.
+ * opts.c (sanitizer_opts): Add pointer-overflow.
+ * lto-streamer-in.c (input_function): Handle IFN_UBSAN_PTR.
+ * fold-const.c (build_range_check): Compute pointer range check in
+ integral type if pointer arithmetics would be needed. Formatting
+ fixes.
+
2017-07-28 Martin Liska <mliska@suse.cz>
PR sanitizer/81460
SANITIZE_OBJECT_SIZE = 1UL << 21,
SANITIZE_VPTR = 1UL << 22,
SANITIZE_BOUNDS_STRICT = 1UL << 23,
+ SANITIZE_POINTER_OVERFLOW = 1UL << 24,
SANITIZE_SHIFT = SANITIZE_SHIFT_BASE | SANITIZE_SHIFT_EXPONENT,
SANITIZE_UNDEFINED = SANITIZE_SHIFT | SANITIZE_DIVIDE | SANITIZE_UNREACHABLE
| SANITIZE_VLA | SANITIZE_NULL | SANITIZE_RETURN
| SANITIZE_BOUNDS | SANITIZE_ALIGNMENT
| SANITIZE_NONNULL_ATTRIBUTE
| SANITIZE_RETURNS_NONNULL_ATTRIBUTE
- | SANITIZE_OBJECT_SIZE | SANITIZE_VPTR,
+ | SANITIZE_OBJECT_SIZE | SANITIZE_VPTR
+ | SANITIZE_POINTER_OVERFLOW,
SANITIZE_UNDEFINED_NONDEFAULT = SANITIZE_FLOAT_DIVIDE | SANITIZE_FLOAT_CAST
| SANITIZE_BOUNDS_STRICT
};
if (low == 0)
return fold_build2_loc (loc, LE_EXPR, type, exp,
- fold_convert_loc (loc, etype, high));
+ fold_convert_loc (loc, etype, high));
if (high == 0)
return fold_build2_loc (loc, GE_EXPR, type, exp,
- fold_convert_loc (loc, etype, low));
+ fold_convert_loc (loc, etype, low));
if (operand_equal_p (low, high, 0))
return fold_build2_loc (loc, EQ_EXPR, type, exp,
- fold_convert_loc (loc, etype, low));
+ fold_convert_loc (loc, etype, low));
if (TREE_CODE (exp) == BIT_AND_EXPR
&& maskable_range_p (low, high, etype, &mask, &value))
return fold_build2_loc (loc, EQ_EXPR, type,
fold_build2_loc (loc, BIT_AND_EXPR, etype,
- exp, mask),
+ exp, mask),
value);
if (integer_zerop (low))
exp = fold_convert_loc (loc, etype, exp);
}
return fold_build2_loc (loc, GT_EXPR, type, exp,
- build_int_cst (etype, 0));
+ build_int_cst (etype, 0));
}
}
if (etype == NULL_TREE)
return NULL_TREE;
+ if (POINTER_TYPE_P (etype))
+ etype = unsigned_type_for (etype);
+
high = fold_convert_loc (loc, etype, high);
low = fold_convert_loc (loc, etype, low);
exp = fold_convert_loc (loc, etype, exp);
value = const_binop (MINUS_EXPR, high, low);
-
- if (POINTER_TYPE_P (etype))
- {
- if (value != 0 && !TREE_OVERFLOW (value))
- {
- low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
- return build_range_check (loc, type,
- fold_build_pointer_plus_loc (loc, exp, low),
- 1, build_int_cst (etype, 0), value);
- }
- return 0;
- }
-
if (value != 0 && !TREE_OVERFLOW (value))
return build_range_check (loc, type,
fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
/* This should get expanded in the sanopt pass. */
+static void
+expand_UBSAN_PTR (internal_fn, gcall *)
+{
+  /* All IFN_UBSAN_PTR calls are lowered by the sanopt pass
+     (ubsan_expand_ptr_ifn), so none should survive to expansion.  */
+  gcc_unreachable ();
+}
+
+/* This should get expanded in the sanopt pass. */
+
static void
expand_UBSAN_OBJECT_SIZE (internal_fn, gcall *)
{
DEF_INTERNAL_FN (UBSAN_CHECK_ADD, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (UBSAN_CHECK_SUB, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (UBSAN_CHECK_MUL, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)
+DEF_INTERNAL_FN (UBSAN_PTR, ECF_LEAF | ECF_NOTHROW, ".R.")
DEF_INTERNAL_FN (UBSAN_OBJECT_SIZE, ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (ABNORMAL_DISPATCHER, ECF_NORETURN, NULL)
DEF_INTERNAL_FN (BUILTIN_EXPECT, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)
if ((flag_sanitize & SANITIZE_OBJECT_SIZE) == 0)
remove = true;
break;
+ case IFN_UBSAN_PTR:
+ if ((flag_sanitize & SANITIZE_POINTER_OVERFLOW) == 0)
+ remove = true;
+ break;
case IFN_ASAN_MARK:
if ((flag_sanitize & SANITIZE_ADDRESS) == 0)
remove = true;
true),
SANITIZER_OPT (object-size, SANITIZE_OBJECT_SIZE, true),
SANITIZER_OPT (vptr, SANITIZE_VPTR, true),
+ SANITIZER_OPT (pointer-overflow, SANITIZE_POINTER_OVERFLOW, true),
SANITIZER_OPT (all, ~0U, true),
#undef SANITIZER_OPT
{ NULL, 0U, 0UL, false }
"__ubsan_handle_load_invalid_value",
BT_FN_VOID_PTR_PTR,
ATTR_COLD_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_UBSAN_HANDLE_POINTER_OVERFLOW,
+ "__ubsan_handle_pointer_overflow",
+ BT_FN_VOID_PTR_PTR_PTR,
+ ATTR_COLD_NOTHROW_LEAF_LIST)
DEF_SANITIZER_BUILTIN(BUILT_IN_UBSAN_HANDLE_DIVREM_OVERFLOW_ABORT,
"__ubsan_handle_divrem_overflow_abort",
BT_FN_VOID_PTR_PTR_PTR,
"__ubsan_handle_load_invalid_value_abort",
BT_FN_VOID_PTR_PTR,
ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_UBSAN_HANDLE_POINTER_OVERFLOW_ABORT,
+ "__ubsan_handle_pointer_overflow_abort",
+ BT_FN_VOID_PTR_PTR_PTR,
+ ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST)
DEF_SANITIZER_BUILTIN(BUILT_IN_UBSAN_HANDLE_FLOAT_CAST_OVERFLOW,
"__ubsan_handle_float_cast_overflow",
BT_FN_VOID_PTR_PTR,
case IFN_UBSAN_OBJECT_SIZE:
no_next = ubsan_expand_objsize_ifn (&gsi);
break;
+ case IFN_UBSAN_PTR:
+ no_next = ubsan_expand_ptr_ifn (&gsi);
+ break;
case IFN_UBSAN_VPTR:
no_next = ubsan_expand_vptr_ifn (&gsi);
break;
2017-07-28 Jakub Jelinek <jakub@redhat.com>
+ PR sanitizer/80998
+ * c-c++-common/ubsan/ptr-overflow-1.c: New test.
+ * c-c++-common/ubsan/ptr-overflow-2.c: New test.
+
PR tree-optimization/81578
* gcc.dg/pr81578.c: New test.
--- /dev/null
+/* PR sanitizer/80998 */
+/* { dg-do run } */
+/* { dg-options "-fsanitize=pointer-overflow -fno-sanitize-recover=pointer-overflow -Wall" } */
+
+struct S { int a; int b; int c[64]; };
+/* Out-of-line helpers so the pointer arithmetic is performed on
+   operands the compiler cannot see through at compile time.  */
+__attribute__((noinline, noclone)) char *f1 (char *p) { return p + 1; }
+__attribute__((noinline, noclone)) char *f2 (char *p) { return p - 1; }
+__attribute__((noinline, noclone)) char *f3 (char *p, int i) { return p + i; }
+__attribute__((noinline, noclone)) char *f4 (char *p, int i) { return p - i; }
+__attribute__((noinline, noclone)) char *f5 (char *p, unsigned long int i) { return p + i; }
+__attribute__((noinline, noclone)) char *f6 (char *p, unsigned long int i) { return p - i; }
+__attribute__((noinline, noclone)) int *f7 (struct S *p) { return &p->a; }
+__attribute__((noinline, noclone)) int *f8 (struct S *p) { return &p->b; }
+__attribute__((noinline, noclone)) int *f9 (struct S *p) { return &p->c[64]; }
+__attribute__((noinline, noclone)) int *f10 (struct S *p, int i) { return &p->c[i]; }
+
+/* Volatile so stores/loads of the pointers are not optimized away.  */
+char *volatile p;
+struct S *volatile q;
+char a[64];
+struct S s;
+int *volatile r;
+
+int
+main ()
+{
+  /* All arithmetic below stays within the bounds of A resp. S/T
+     (including one-past-the-end), so with
+     -fno-sanitize-recover=pointer-overflow the test only passes if
+     nothing is diagnosed.  */
+  struct S t;
+  p = &a[32];
+  p = f1 (p);
+  p = f1 (p);
+  p = f2 (p);
+  p = f3 (p, 1);
+  p = f3 (p, -1);
+  p = f3 (p, 3);
+  p = f3 (p, -6);
+  p = f4 (p, 1);
+  p = f4 (p, -1);
+  p = f4 (p, 3);
+  p = f4 (p, -6);
+  p = f5 (p, 1);
+  p = f5 (p, 3);
+  p = f6 (p, 1);
+  p = f6 (p, 3);
+  /* "Negative" unsigned long offsets only act like small negative
+     offsets when unsigned long is at least pointer-wide.  */
+  if (sizeof (unsigned long) >= sizeof (char *))
+    {
+      p = f5 (p, -1);
+      p = f5 (p, -6);
+      p = f6 (p, -1);
+      p = f6 (p, -6);
+    }
+  q = &s;
+  r = f7 (q);
+  r = f8 (q);
+  r = f9 (q);
+  r = f10 (q, 0);
+  r = f10 (q, 10);
+  r = f10 (q, 64);
+  q = &t;
+  r = f7 (q);
+  r = f8 (q);
+  r = f9 (q);
+  r = f10 (q, 0);
+  r = f10 (q, 10);
+  r = f10 (q, 64);
+  return 0;
+}
--- /dev/null
+/* PR sanitizer/80998 */
+/* { dg-do run } */
+/* { dg-options "-fsanitize=pointer-overflow -fsanitize-recover=pointer-overflow -fno-ipa-icf -Wall" } */
+
+__attribute__((noinline, noclone)) char * f1 (char *p) { return p + 1; }
+__attribute__((noinline, noclone)) char * f2 (char *p) { return p - 1; }
+__attribute__((noinline, noclone)) char * f3 (char *p, int i) { return p + i; }
+__attribute__((noinline, noclone)) char * f4 (char *p, int i) { return p + i; }
+__attribute__((noinline, noclone)) char * f5 (char *p, int i) { return p - i; }
+__attribute__((noinline, noclone)) char * f6 (char *p, int i) { return p - i; }
+__attribute__((noinline, noclone)) char * f7 (char *p, unsigned long int i) { return p + i; }
+__attribute__((noinline, noclone)) char * f8 (char *p, unsigned long int i) { return p + i; }
+__attribute__((noinline, noclone)) char * f9 (char *p, unsigned long int i) { return p - i; }
+__attribute__((noinline, noclone)) char * f10 (char *p, unsigned long int i) { return p - i; }
+struct S { int a; int b; int c[64]; };
+__attribute__((noinline, noclone)) int *f11 (struct S *p) { return &p->a; }
+__attribute__((noinline, noclone)) int *f12 (struct S *p) { return &p->b; }
+__attribute__((noinline, noclone)) int *f13 (struct S *p) { return &p->c[64]; }
+__attribute__((noinline, noclone)) int *f14 (struct S *p, int i) { return &p->c[i]; }
+__attribute__((noinline, noclone)) int *f15 (struct S *p, int i) { return &p->c[i]; }
+__attribute__((noinline, noclone)) int *f16 (struct S *p) { return &p->a; }
+__attribute__((noinline, noclone)) int *f17 (struct S *p) { return &p->b; }
+__attribute__((noinline, noclone)) int *f18 (struct S *p) { return &p->c[64]; }
+__attribute__((noinline, noclone)) int *f19 (struct S *p, int i) { return &p->c[i]; }
+__attribute__((noinline, noclone)) int *f20 (struct S *p, int i) { return &p->c[i]; }
+__attribute__((noinline, noclone)) int *f21 (struct S *p) { return &p->a; }
+__attribute__((noinline, noclone)) int *f22 (struct S *p) { return &p->b; }
+__attribute__((noinline, noclone)) int *f23 (struct S *p) { return &p->c[64]; }
+__attribute__((noinline, noclone)) int *f24 (struct S *p, int i) { return &p->c[i]; }
+__attribute__((noinline, noclone)) int *f25 (struct S *p, int i) { return &p->c[i]; }
+
+/* CAUTION: the definitions above must stay on source lines 5-30; the
+   dg-output patterns at the end of the file match those exact line
+   (and column) numbers.  Volatile keeps the calls from being folded.  */
+char *volatile p;
+__UINTPTR_TYPE__ volatile u;
+struct S *volatile q;
+int *volatile r;
+
+int
+main ()
+{
+  /* Each call below performs pointer arithmetic that wraps around the
+     address space; recovery is enabled, so execution continues and the
+     dg-output patterns at the end of the file check one diagnostic per
+     overflow (identified by the line numbers of f1..f25 above).  */
+  u = ~(__UINTPTR_TYPE__) 0;
+  p = (char *) u;
+  p = f1 (p);
+  u = 0;
+  p = (char *) u;
+  p = f2 (p);
+  u = -(__UINTPTR_TYPE__) 7;
+  p = (char *) u;
+  p = f3 (p, 7);
+  u = 3;
+  p = (char *) u;
+  p = f4 (p, -4);
+  u = 23;
+  p = (char *) u;
+  p = f5 (p, 27);
+  u = -(__UINTPTR_TYPE__) 15;
+  p = (char *) u;
+  p = f6 (p, -15);
+  u = -(__UINTPTR_TYPE__) 29;
+  p = (char *) u;
+  p = f7 (p, 31);
+  u = 23;
+  p = (char *) u;
+  p = f9 (p, 24);
+  /* The remaining cases use "negative" unsigned long offsets, which
+     only wrap as intended when unsigned long is pointer-wide.  */
+  if (sizeof (unsigned long) < sizeof (char *))
+    return 0;
+  u = 7;
+  p = (char *) u;
+  p = f8 (p, -8);
+  u = -(__UINTPTR_TYPE__) 25;
+  p = (char *) u;
+  p = f10 (p, -25);
+  /* Struct member accesses whose implicit byte offset wraps.  */
+  u = ~(__UINTPTR_TYPE__) 0;
+  q = (struct S *) u;
+  r = f11 (q);
+  r = f12 (q);
+  r = f13 (q);
+  r = f14 (q, 0);
+  r = f15 (q, 63);
+  u = ~(__UINTPTR_TYPE__) 0 - (17 * sizeof (int));
+  q = (struct S *) u;
+  r = f16 (q);
+  r = f17 (q);
+  r = f18 (q);
+  r = f19 (q, 0);
+  r = f20 (q, 63);
+  u = 3 * sizeof (int);
+  q = (struct S *) u;
+  r = f21 (q);
+  r = f22 (q);
+  r = f23 (q);
+  r = f24 (q, -2);
+  r = f25 (q, -6);
+  return 0;
+}
+
+/* { dg-output ":5:6\[79]\[^\n\r]*runtime error: pointer index expression with base (0\[xX])?\[fF]\+ overflowed to (0\[xX])?0\+(\n|\r\n|\r)" } */
+/* { dg-output "\[^\n\r]*:6:6\[79]\[^\n\r]*runtime error: pointer index expression with base (0\[xX])?0\+ overflowed to (0\[xX])?\[fF]\+(\n|\r\n|\r)" } */
+/* { dg-output "\[^\n\r]*:7:7\[46]\[^\n\r]*runtime error: pointer index expression with base (0\[xX])?\[fF]\+9 overflowed to (0\[xX])?0\+(\n|\r\n|\r)" } */
+/* { dg-output "\[^\n\r]*:8:7\[46]\[^\n\r]*runtime error: pointer index expression with base (0\[xX])?0\+3 overflowed to (0\[xX])?\[fF]\+(\n|\r\n|\r)" } */
+/* { dg-output "\[^\n\r]*:9:7\[46]\[^\n\r]*runtime error: pointer index expression with base (0\[xX])?0\+17 overflowed to (0\[xX])?\[fF]\+\[cC](\n|\r\n|\r)" } */
+/* { dg-output "\[^\n\r]*:10:7\[46]\[^\n\r]*runtime error: pointer index expression with base (0\[xX])?\[fF]\+1 overflowed to (0\[xX])?0\+(\n|\r\n|\r)" } */
+/* { dg-output "\[^\n\r]*:11:\[89]\[80]\[^\n\r]*runtime error: pointer index expression with base (0\[xX])?\[fF]\+\[eE]3 overflowed to (0\[xX])?0\+2(\n|\r\n|\r)" } */
+/* { dg-output "\[^\n\r]*:13:\[89]\[80]\[^\n\r]*runtime error: pointer index expression with base (0\[xX])?0\+17 overflowed to (0\[xX])?\[fF]\+(\n|\r\n|\r)" } */
+/* { dg-output "\[^\n\r]*:12:\[89]\[80]\[^\n\r]*runtime error: pointer index expression with base (0\[xX])?0\+7 overflowed to (0\[xX])?\[fF]\+(\n|\r\n|\r)" } */
+/* { dg-output "\[^\n\r]*:14:\[89]\[91]\[^\n\r]*runtime error: pointer index expression with base (0\[xX])?\[fF]\+\[eE]7 overflowed to (0\[xX])?0\+" } */
+/* { dg-output "(\n|\r\n|\r)" { target int32 } } */
+/* { dg-output "\[^\n\r]*:17:\[67]\[82]\[^\n\r]*runtime error: pointer index expression with base (0\[xX])?\[fF]\+ overflowed to (0\[xX])?0\+3(\n|\r\n|\r)" { target int32 } } */
+/* { dg-output "\[^\n\r]*:18:\[67]\[86]\[^\n\r]*runtime error: pointer index expression with base (0\[xX])?\[fF]\+ overflowed to (0\[xX])?0\+107(\n|\r\n|\r)" { target int32 } } */
+/* { dg-output "\[^\n\r]*:19:\[78]\[52]\[^\n\r]*runtime error: pointer index expression with base (0\[xX])?\[fF]\+ overflowed to (0\[xX])?0\+7(\n|\r\n|\r)" { target int32 } } */
+/* { dg-output "\[^\n\r]*:20:\[78]\[52]\[^\n\r]*runtime error: pointer index expression with base (0\[xX])?\[fF]\+ overflowed to (0\[xX])?0\+103(\n|\r\n|\r)" { target int32 } } */
+/* { dg-output "\[^\n\r]*:23:\[67]\[86]\[^\n\r]*runtime error: pointer index expression with base (0\[xX])?\[fF]\+\[bB]\[bB] overflowed to (0\[xX])?0\+\[cC]3(\n|\r\n|\r)" { target int32 } } */
+/* { dg-output "\[^\n\r]*:25:\[78]\[52]\[^\n\r]*runtime error: pointer index expression with base (0\[xX])?\[fF]\+\[bB]\[bB] overflowed to (0\[xX])?0\+\[bB]\[fF](\n|\r\n|\r)" { target int32 } } */
+/* { dg-output "\[^\n\r]*:30:\[78]\[52]\[^\n\r]*runtime error: pointer index expression with base (0\[xX])?0\+\[cC] overflowed to (0\[xX])?\[fF]\+\[cC]" { target int32 } } */
case IFN_UBSAN_BOUNDS:
case IFN_UBSAN_VPTR:
case IFN_UBSAN_OBJECT_SIZE:
+ case IFN_UBSAN_PTR:
case IFN_ASAN_CHECK:
return false;
default:
case IFN_UBSAN_CHECK_SUB:
case IFN_UBSAN_CHECK_MUL:
case IFN_UBSAN_OBJECT_SIZE:
+ case IFN_UBSAN_PTR:
case IFN_ASAN_CHECK:
/* For these internal functions, gimple_location is an implicit
parameter, which will be used explicitly after expansion.
#include "builtins.h"
#include "tree-object-size.h"
#include "tree-cfg.h"
+#include "gimple-fold.h"
+#include "varasm.h"
/* Map from a tree to a VAR_DECL tree. */
return true;
}
+/* Expand UBSAN_PTR internal call.  The call has two arguments, the
+   pointer PTR and the sizetype offset OFF added to it; at runtime it
+   diagnoses the case where the addition wraps around the address
+   space.  Returns true when the statement has been removed (the
+   caller must not advance the iterator), false when it has been
+   replaced by a GIMPLE_COND.  */
+
+bool
+ubsan_expand_ptr_ifn (gimple_stmt_iterator *gsip)
+{
+  gimple_stmt_iterator gsi = *gsip;
+  gimple *stmt = gsi_stmt (gsi);
+  location_t loc = gimple_location (stmt);
+  gcc_assert (gimple_call_num_args (stmt) == 2);
+  tree ptr = gimple_call_arg (stmt, 0);
+  tree off = gimple_call_arg (stmt, 1);
+
+  /* Adding a constant zero can never wrap; just drop the check.  */
+  if (integer_zerop (off))
+    {
+      gsi_remove (gsip, true);
+      unlink_stmt_vdef (stmt);
+      return true;
+    }
+
+  basic_block cur_bb = gsi_bb (gsi);
+  tree ptrplusoff = make_ssa_name (pointer_sized_int_node);
+  tree ptri = make_ssa_name (pointer_sized_int_node);
+  /* 1 if OFF is known non-negative, 2 if known negative, 3 if it can
+     be either -- presumably the get_range_pos_neg convention; the
+     last case needs two separate comparisons below.  */
+  int pos_neg = get_range_pos_neg (off);
+
+  /* Split the original block holding the pointer dereference.  */
+  edge e = split_block (cur_bb, stmt);
+
+  /* Get a hold on the 'condition block', the 'then block' and the
+     'else block'.  */
+  basic_block cond_bb = e->src;
+  basic_block fallthru_bb = e->dest;
+  basic_block then_bb = create_empty_bb (cond_bb);
+  /* Only used when POS_NEG == 3: the sign of OFF is tested first and
+     each sign gets its own comparison block.  */
+  basic_block cond_pos_bb = NULL, cond_neg_bb = NULL;
+  add_bb_to_loop (then_bb, cond_bb->loop_father);
+  loops_state_set (LOOPS_NEED_FIXUP);
+
+  /* Set up the fallthrough basic block.  */
+  e->flags = EDGE_FALSE_VALUE;
+  if (pos_neg != 3)
+    {
+      e->count = cond_bb->count;
+      e->probability = profile_probability::very_likely ();
+
+      /* Connect 'then block' with the 'else block'.  This is needed
+	 as the ubsan routines we call in the 'then block' are not noreturn.
+	 The 'then block' only has one outgoing edge.  */
+      make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
+
+      /* Make an edge coming from the 'cond block' into the 'then block';
+	 this edge is unlikely taken, so set up the probability
+	 accordingly.  */
+      e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
+      e->probability = profile_probability::very_unlikely ();
+    }
+  else
+    {
+      /* Sign of OFF unknown: COND_BB will test the sign, branching to
+	 COND_POS_BB resp. COND_NEG_BB, each of which performs the
+	 appropriate overflow comparison and branches to THEN_BB on
+	 failure.  */
+      profile_count count = cond_bb->count.apply_probability (PROB_EVEN);
+      e->count = count;
+      e->probability = profile_probability::even ();
+
+      e = split_block (fallthru_bb, (gimple *) NULL);
+      cond_neg_bb = e->src;
+      fallthru_bb = e->dest;
+      e->count = count;
+      e->probability = profile_probability::very_likely ();
+      e->flags = EDGE_FALSE_VALUE;
+
+      e = make_edge (cond_neg_bb, then_bb, EDGE_TRUE_VALUE);
+      e->probability = profile_probability::very_unlikely ();
+
+      cond_pos_bb = create_empty_bb (cond_bb);
+      add_bb_to_loop (cond_pos_bb, cond_bb->loop_father);
+
+      e = make_edge (cond_bb, cond_pos_bb, EDGE_TRUE_VALUE);
+      e->count = count;
+      e->probability = profile_probability::even ();
+
+      e = make_edge (cond_pos_bb, then_bb, EDGE_TRUE_VALUE);
+      e->probability = profile_probability::very_unlikely ();
+
+      e = make_edge (cond_pos_bb, fallthru_bb, EDGE_FALSE_VALUE);
+      e->count = count;
+      e->probability = profile_probability::very_likely ();
+
+      make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
+    }
+
+  /* Compute PTRI = (uintptr) PTR and PTRPLUSOFF = PTRI + OFF; the
+     overflow tests below compare these two values.  */
+  gimple *g = gimple_build_assign (ptri, NOP_EXPR, ptr);
+  gimple_set_location (g, loc);
+  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
+  g = gimple_build_assign (ptrplusoff, PLUS_EXPR, ptri, off);
+  gimple_set_location (g, loc);
+  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
+
+  /* Update dominance info for the newly created then_bb; note that
+     fallthru_bb's dominance info has already been updated by
+     split_block.  */
+  if (dom_info_available_p (CDI_DOMINATORS))
+    {
+      set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
+      if (pos_neg == 3)
+	{
+	  set_immediate_dominator (CDI_DOMINATORS, cond_pos_bb, cond_bb);
+	  set_immediate_dominator (CDI_DOMINATORS, fallthru_bb, cond_bb);
+	}
+    }
+
+  /* Put the ubsan builtin call into the newly created BB.  */
+  if (flag_sanitize_undefined_trap_on_error)
+    g = gimple_build_call (builtin_decl_implicit (BUILT_IN_TRAP), 0);
+  else
+    {
+      /* Pick the recoverable or aborting handler depending on
+	 -fsanitize-recover=pointer-overflow.  */
+      enum built_in_function bcode
+	= (flag_sanitize_recover & SANITIZE_POINTER_OVERFLOW)
+	  ? BUILT_IN_UBSAN_HANDLE_POINTER_OVERFLOW
+	  : BUILT_IN_UBSAN_HANDLE_POINTER_OVERFLOW_ABORT;
+      tree fn = builtin_decl_implicit (bcode);
+      tree data
+	= ubsan_create_data ("__ubsan_ptrovf_data", 1, &loc,
+			     NULL_TREE, NULL_TREE);
+      data = build_fold_addr_expr_loc (loc, data);
+      g = gimple_build_call (fn, 3, data, ptr, ptrplusoff);
+    }
+  gimple_stmt_iterator gsi2 = gsi_start_bb (then_bb);
+  gimple_set_location (g, loc);
+  gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
+
+  /* Unlink the UBSAN_PTRs vops before replacing it.  */
+  unlink_stmt_vdef (stmt);
+
+  if (TREE_CODE (off) == INTEGER_CST)
+    /* For a constant offset the addition wraps iff (uintptr) PTR is
+       below -OFF (negative OFF) resp. at least -OFF (positive OFF).  */
+    g = gimple_build_cond (wi::neg_p (off) ? LT_EXPR : GE_EXPR, ptri,
+			   fold_build1 (NEGATE_EXPR, sizetype, off),
+			   NULL_TREE, NULL_TREE);
+  else if (pos_neg != 3)
+    /* A variable offset with known sign wraps iff the result compares
+       on the wrong side of the base.  */
+    g = gimple_build_cond (pos_neg == 1 ? LT_EXPR : GT_EXPR,
+			   ptrplusoff, ptri, NULL_TREE, NULL_TREE);
+  else
+    {
+      /* Unknown sign: fill COND_POS_BB and COND_NEG_BB with the two
+	 comparisons and make COND_BB itself test (ssizetype) OFF >= 0.  */
+      gsi2 = gsi_start_bb (cond_pos_bb);
+      g = gimple_build_cond (LT_EXPR, ptrplusoff, ptri, NULL_TREE, NULL_TREE);
+      gimple_set_location (g, loc);
+      gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
+
+      gsi2 = gsi_start_bb (cond_neg_bb);
+      g = gimple_build_cond (GT_EXPR, ptrplusoff, ptri, NULL_TREE, NULL_TREE);
+      gimple_set_location (g, loc);
+      gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
+
+      gimple_seq seq = NULL;
+      tree t = gimple_build (&seq, loc, NOP_EXPR, ssizetype, off);
+      t = gimple_build (&seq, loc, GE_EXPR, boolean_type_node,
+			t, ssize_int (0));
+      gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
+      g = gimple_build_cond (NE_EXPR, t, boolean_false_node,
+			     NULL_TREE, NULL_TREE);
+    }
+  gimple_set_location (g, loc);
+  /* Replace the UBSAN_PTR with a GIMPLE_COND stmt.  */
+  gsi_replace (&gsi, g, false);
+  return false;
+}
+
+
/* Cached __ubsan_vptr_type_cache decl. */
static GTY(()) tree ubsan_vptr_type_cache_decl;
instrument_mem_ref (t, base, &gsi, is_lhs);
}
+/* Instrument pointer arithmetics PTR p+ OFF: emit an IFN_UBSAN_PTR
+   check before the current statement; the sanopt pass later expands
+   it into the actual wrap-around test.  */
+
+static void
+instrument_pointer_overflow (gimple_stmt_iterator *gsi, tree ptr, tree off)
+{
+  /* The expanded check compares in a pointer-sized unsigned type;
+     punt if sizetype is not exactly pointer-wide on this target.  */
+  if (TYPE_PRECISION (sizetype) != POINTER_SIZE)
+    return;
+  gcall *g = gimple_build_call_internal (IFN_UBSAN_PTR, 2, ptr, off);
+  gimple_set_location (g, gimple_location (gsi_stmt (*gsi)));
+  gsi_insert_before (gsi, g, GSI_SAME_STMT);
+}
+
+/* Instrument pointer arithmetics if any: if T is a memory reference
+   (or the address of one), instrument the implicit pointer arithmetic
+   performed while computing its address.  */
+
+static void
+maybe_instrument_pointer_overflow (gimple_stmt_iterator *gsi, tree t)
+{
+  if (TYPE_PRECISION (sizetype) != POINTER_SIZE)
+    return;
+
+  /* Handle also e.g. &s->i.  */
+  if (TREE_CODE (t) == ADDR_EXPR)
+    t = TREE_OPERAND (t, 0);
+
+  if (!handled_component_p (t) && TREE_CODE (t) != MEM_REF)
+    return;
+
+  /* Decompose T into an innermost base plus constant bit offset and
+     optional variable OFFSET.  */
+  HOST_WIDE_INT bitsize, bitpos, bytepos;
+  tree offset;
+  machine_mode mode;
+  int volatilep = 0, reversep, unsignedp = 0;
+  tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
+				    &unsignedp, &reversep, &volatilep);
+  /* Extra byte offset coming from a MEM_REF base, if any.  */
+  tree moff = NULL_TREE;
+
+  bool decl_p = DECL_P (inner);
+  tree base;
+  if (decl_p)
+    {
+      if (DECL_REGISTER (inner))
+	return;
+      base = inner;
+      /* If BASE is a fixed size automatic variable or
+	 global variable defined in the current TU and bitpos
+	 fits, don't instrument anything.  */
+      if (offset == NULL_TREE
+	  && bitpos > 0
+	  && (VAR_P (base)
+	      || TREE_CODE (base) == PARM_DECL
+	      || TREE_CODE (base) == RESULT_DECL)
+	  && DECL_SIZE (base)
+	  && TREE_CODE (DECL_SIZE (base)) == INTEGER_CST
+	  && compare_tree_int (DECL_SIZE (base), bitpos) >= 0
+	  && (!is_global_var (base) || decl_binds_to_current_def_p (base)))
+	return;
+    }
+  else if (TREE_CODE (inner) == MEM_REF)
+    {
+      base = TREE_OPERAND (inner, 0);
+      /* Skip accesses through the address of a non-addressable local;
+	 presumably such an object cannot sit at the edge of the
+	 address space, so its offsets cannot wrap -- TODO confirm.  */
+      if (TREE_CODE (base) == ADDR_EXPR
+	  && DECL_P (TREE_OPERAND (base, 0))
+	  && !TREE_ADDRESSABLE (TREE_OPERAND (base, 0))
+	  && !is_global_var (TREE_OPERAND (base, 0)))
+	return;
+      moff = TREE_OPERAND (inner, 1);
+      if (integer_zerop (moff))
+	moff = NULL_TREE;
+    }
+  else
+    return;
+
+  if (!POINTER_TYPE_P (TREE_TYPE (base)) && !DECL_P (base))
+    return;
+  bytepos = bitpos / BITS_PER_UNIT;
+  /* No offset at all means no arithmetic to check.  */
+  if (offset == NULL_TREE && bytepos == 0 && moff == NULL_TREE)
+    return;
+
+  /* Build the base address ...  */
+  tree base_addr = base;
+  if (decl_p)
+    base_addr = build1 (ADDR_EXPR,
+			build_pointer_type (TREE_TYPE (base)), base);
+  /* ... and accumulate OFFSET + BYTEPOS + MOFF into T.  */
+  t = offset;
+  if (bytepos)
+    {
+      if (t)
+	t = fold_build2 (PLUS_EXPR, TREE_TYPE (t), t,
+			 build_int_cst (TREE_TYPE (t), bytepos));
+      else
+	t = size_int (bytepos);
+    }
+  if (moff)
+    {
+      if (t)
+	t = fold_build2 (PLUS_EXPR, TREE_TYPE (t), t,
+			 fold_convert (TREE_TYPE (t), moff));
+      else
+	t = fold_convert (sizetype, moff);
+    }
+  t = force_gimple_operand_gsi (gsi, t, true, NULL_TREE, true,
+				GSI_SAME_STMT);
+  base_addr = force_gimple_operand_gsi (gsi, base_addr, true, NULL_TREE, true,
+					GSI_SAME_STMT);
+  instrument_pointer_overflow (gsi, base_addr, t);
+}
+
/* Build an ubsan builtin call for the signed-integer-overflow
sanitization. CODE says what kind of builtin are we building,
LOC is a location, LHSTYPE is the type of LHS, OP0 and OP1
{
tree rhs1 = gimple_assign_rhs1 (def_stmt);
if (TREE_CODE (rhs1) == SSA_NAME
- && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1))
+ && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1))
break;
else
base = rhs1;
| SANITIZE_ALIGNMENT
| SANITIZE_NONNULL_ATTRIBUTE
| SANITIZE_RETURNS_NONNULL_ATTRIBUTE
- | SANITIZE_OBJECT_SIZE));
+ | SANITIZE_OBJECT_SIZE
+ | SANITIZE_POINTER_OVERFLOW));
}
virtual unsigned int execute (function *);
}
}
+ if (sanitize_flags_p (SANITIZE_POINTER_OVERFLOW, fun->decl))
+ {
+ if (is_gimple_assign (stmt)
+ && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
+ instrument_pointer_overflow (&gsi,
+ gimple_assign_rhs1 (stmt),
+ gimple_assign_rhs2 (stmt));
+ if (gimple_store_p (stmt))
+ maybe_instrument_pointer_overflow (&gsi,
+ gimple_get_lhs (stmt));
+ if (gimple_assign_single_p (stmt))
+ maybe_instrument_pointer_overflow (&gsi,
+ gimple_assign_rhs1 (stmt));
+ if (is_gimple_call (stmt))
+ {
+ unsigned args_num = gimple_call_num_args (stmt);
+ for (unsigned i = 0; i < args_num; ++i)
+ {
+ tree arg = gimple_call_arg (stmt, i);
+ if (is_gimple_reg (arg))
+ continue;
+ maybe_instrument_pointer_overflow (&gsi, arg);
+ }
+ }
+ }
+
gsi_next (&gsi);
}
if (gimple_purge_dead_eh_edges (bb))
extern bool ubsan_expand_bounds_ifn (gimple_stmt_iterator *);
extern bool ubsan_expand_null_ifn (gimple_stmt_iterator *);
extern bool ubsan_expand_objsize_ifn (gimple_stmt_iterator *);
+extern bool ubsan_expand_ptr_ifn (gimple_stmt_iterator *);
extern bool ubsan_expand_vptr_ifn (gimple_stmt_iterator *);
extern bool ubsan_instrument_unreachable (gimple_stmt_iterator *);
extern tree ubsan_create_data (const char *, int, const location_t *, ...);
+2017-07-28 Jakub Jelinek <jakub@redhat.com>
+
+ PR sanitizer/80998
+ * ubsan/ubsan_handlers.cc: Cherry-pick upstream r304461.
+ * ubsan/ubsan_checks.inc: Likewise.
+ * ubsan/ubsan_handlers.h: Likewise.
+
2017-07-14 Jakub Jelinek <jakub@redhat.com>
PR sanitizer/81066
UBSAN_CHECK(GenericUB, "undefined-behavior", "undefined")
UBSAN_CHECK(NullPointerUse, "null-pointer-use", "null")
+UBSAN_CHECK(PointerOverflow, "pointer-overflow", "pointer-overflow")
UBSAN_CHECK(MisalignedPointerUse, "misaligned-pointer-use", "alignment")
UBSAN_CHECK(InsufficientObjectSize, "insufficient-object-size", "object-size")
UBSAN_CHECK(SignedIntegerOverflow, "signed-integer-overflow",
Die();
}
+// Common implementation for the recoverable and aborting pointer
+// overflow handlers: report that a pointer index expression wrapped
+// from Base to Result.
+static void handlePointerOverflowImpl(PointerOverflowData *Data,
+                                      ValueHandle Base,
+                                      ValueHandle Result,
+                                      ReportOptions Opts) {
+  SourceLocation Loc = Data->Loc.acquire();
+  ErrorType ET = ErrorType::PointerOverflow;
+
+  // Honor suppressions/deduplication for this source location.
+  if (ignoreReport(Loc, Opts, ET))
+    return;
+
+  ScopedReport R(Opts, Loc, ET);
+
+  Diag(Loc, DL_Error, "pointer index expression with base %0 overflowed to %1")
+    << (void *)Base << (void*)Result;
+}
+
+// Recoverable entry point: report and continue execution.
+void __ubsan::__ubsan_handle_pointer_overflow(PointerOverflowData *Data,
+                                              ValueHandle Base,
+                                              ValueHandle Result) {
+  GET_REPORT_OPTIONS(false);
+  handlePointerOverflowImpl(Data, Base, Result, Opts);
+}
+
+// Fatal entry point: report, then terminate via Die().
+void __ubsan::__ubsan_handle_pointer_overflow_abort(PointerOverflowData *Data,
+                                                    ValueHandle Base,
+                                                    ValueHandle Result) {
+  GET_REPORT_OPTIONS(true);
+  handlePointerOverflowImpl(Data, Base, Result, Opts);
+  Die();
+}
+
static void handleCFIBadIcall(CFICheckFailData *Data, ValueHandle Function,
ReportOptions Opts) {
if (Data->CheckKind != CFITCK_ICall)
/// \brief Handle passing null pointer to function with nonnull attribute.
RECOVERABLE(nonnull_arg, NonNullArgData *Data)
+/// \brief Data attached to a pointer overflow check: only the source
+/// location of the offending pointer expression.
+struct PointerOverflowData {
+  SourceLocation Loc;
+};
+
+RECOVERABLE(pointer_overflow, PointerOverflowData *Data, ValueHandle Base,
+            ValueHandle Result)
+
/// \brief Known CFI check kinds.
/// Keep in sync with the enum of the same name in CodeGenFunction.h
enum CFITypeCheckKind : unsigned char {