/* AddressSanitizer, a fast memory error detector.
- Copyright (C) 2012-2015 Free Software Foundation, Inc.
+ Copyright (C) 2012-2016 Free Software Foundation, Inc.
Contributed by Kostya Serebryany <kcc@google.com>
This file is part of GCC.
#include "config.h"
#include "system.h"
#include "coretypes.h"
-#include "hash-set.h"
-#include "machmode.h"
-#include "vec.h"
-#include "double-int.h"
-#include "input.h"
-#include "alias.h"
-#include "symtab.h"
-#include "options.h"
-#include "wide-int.h"
-#include "inchash.h"
+#include "backend.h"
+#include "target.h"
+#include "rtl.h"
#include "tree.h"
+#include "gimple.h"
+#include "cfghooks.h"
+#include "alloc-pool.h"
+#include "tree-pass.h"
+#include "tm_p.h"
+#include "stringpool.h"
+#include "tree-ssanames.h"
+#include "optabs.h"
+#include "emit-rtl.h"
+#include "cgraph.h"
+#include "gimple-pretty-print.h"
+#include "alias.h"
#include "fold-const.h"
-#include "hash-table.h"
-#include "predict.h"
-#include "tm.h"
-#include "hard-reg-set.h"
-#include "function.h"
-#include "dominance.h"
-#include "cfg.h"
#include "cfganal.h"
-#include "basic-block.h"
-#include "tree-ssa-alias.h"
-#include "internal-fn.h"
-#include "gimple-expr.h"
-#include "is-a.h"
-#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
-#include "calls.h"
#include "varasm.h"
#include "stor-layout.h"
#include "tree-iterator.h"
-#include "hash-map.h"
-#include "plugin-api.h"
-#include "ipa-ref.h"
-#include "cgraph.h"
-#include "stringpool.h"
-#include "tree-ssanames.h"
-#include "tree-pass.h"
#include "asan.h"
-#include "gimple-pretty-print.h"
-#include "target.h"
-#include "hashtab.h"
-#include "rtl.h"
-#include "flags.h"
-#include "statistics.h"
-#include "real.h"
-#include "fixed-value.h"
-#include "insn-config.h"
-#include "expmed.h"
#include "dojump.h"
#include "explow.h"
-#include "emit-rtl.h"
-#include "stmt.h"
#include "expr.h"
-#include "insn-codes.h"
-#include "optabs.h"
#include "output.h"
-#include "tm_p.h"
#include "langhooks.h"
-#include "alloc-pool.h"
#include "cfgloop.h"
#include "gimple-builder.h"
#include "ubsan.h"
#include "params.h"
#include "builtins.h"
+#include "fnmatch.h"
/* AddressSanitizer finds out-of-bounds and use-after-free bugs
with <2x slowdown on average.
where '(...){n}' means the content inside the parenthesis occurs 'n'
times, with 'n' being the number of variables on the stack.
-
+
3/ The following 8 bytes contain the PC of the current function which
will be used by the run-time library to print an error message.
static unsigned HOST_WIDE_INT asan_shadow_offset_value;
static bool asan_shadow_offset_computed;
+static vec<char *> sanitized_sections;
/* Sets shadow offset to value in string VAL. */
set_asan_shadow_offset (const char *val)
{
char *endp;
-
+
errno = 0;
#ifdef HAVE_LONG_LONG
asan_shadow_offset_value = strtoull (val, &endp, 0);
return true;
}
+/* Set list of user-defined sections that need to be sanitized. */
+
+void
+set_sanitized_sections (const char *sections)
+{
+ char *pat;
+ unsigned i;
+ /* Drop any patterns recorded by a previous invocation: the new
+ SECTIONS list fully replaces the old one, so free each stored
+ pattern before truncating the vector. */
+ FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
+ free (pat);
+ sanitized_sections.truncate (0);
+
+ /* SECTIONS is a comma-separated list of section glob patterns.
+ Split it on commas and store a malloc'ed copy of each element.
+ NOTE(review): an empty element ("a,,b" or a trailing comma) is
+ pushed as an empty pattern — presumably harmless since fnmatch
+ with an empty pattern only matches the empty string. */
+ for (const char *s = sections; *s; )
+ {
+ const char *end;
+ /* Advance END to the next comma or the terminating NUL. */
+ for (end = s; *end && *end != ','; ++end);
+ size_t len = end - s;
+ sanitized_sections.safe_push (xstrndup (s, len));
+ /* Step past the comma separator, if there was one. */
+ s = *end ? end + 1 : end;
+ }
+}
+
+/* Checks whether section SEC should be sanitized. */
+
+static bool
+section_sanitized_p (const char *sec)
+{
+ char *pat;
+ unsigned i;
+ /* SEC is sanitized iff it matches any glob pattern supplied via
+ set_sanitized_sections. FNM_PERIOD makes a leading '.' in SEC
+ match only an explicit '.' in the pattern, not a wildcard. */
+ FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
+ if (fnmatch (pat, sec, FNM_PERIOD) == 0)
+ return true;
+ return false;
+}
+
/* Returns Asan shadow offset. */
static unsigned HOST_WIDE_INT
HOST_WIDE_INT access_size;
};
-static alloc_pool asan_mem_ref_alloc_pool;
-
-/* This creates the alloc pool used to store the instances of
- asan_mem_ref that are stored in the hash table asan_mem_ref_ht. */
-
-static alloc_pool
-asan_mem_ref_get_alloc_pool ()
-{
- if (asan_mem_ref_alloc_pool == NULL)
- asan_mem_ref_alloc_pool = create_alloc_pool ("asan_mem_ref",
- sizeof (asan_mem_ref),
- 10);
- return asan_mem_ref_alloc_pool;
-
-}
+object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");
/* Initializes an instance of asan_mem_ref. */
static asan_mem_ref*
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
{
- asan_mem_ref *ref =
- (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());
+ asan_mem_ref *ref = asan_mem_ref_pool.allocate ();
asan_mem_ref_init (ref, start, access_size);
return ref;
return asan_mem_ref_get_end (ref->start, len);
}
-struct asan_mem_ref_hasher
- : typed_noop_remove <asan_mem_ref>
+struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
{
- typedef asan_mem_ref value_type;
- typedef asan_mem_ref compare_type;
-
- static inline hashval_t hash (const value_type *);
- static inline bool equal (const value_type *, const compare_type *);
+ static inline hashval_t hash (const asan_mem_ref *);
+ static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
};
/* Hash a memory reference. */
delete asan_mem_ref_ht;
asan_mem_ref_ht = NULL;
- if (asan_mem_ref_alloc_pool)
- {
- free_alloc_pool (asan_mem_ref_alloc_pool);
- asan_mem_ref_alloc_pool = NULL;
- }
+ asan_mem_ref_pool.release ();
}
/* Return true iff the memory reference REF has been instrumented. */
contains. */
static bool
-has_stmt_been_instrumented_p (gimple stmt)
+has_stmt_been_instrumented_p (gimple *stmt)
{
if (gimple_assign_single_p (stmt))
{
snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
use_after_return_class);
ret = init_one_libfunc (buf);
- rtx addr = convert_memory_address (ptr_mode, base);
- ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
+ ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 1,
GEN_INT (asan_frame_size
+ base_align_bias),
- TYPE_MODE (pointer_sized_int_node),
- addr, ptr_mode);
+ TYPE_MODE (pointer_sized_int_node));
+ /* __asan_stack_malloc_[n] returns a pointer to fake stack if succeeded
+ and NULL otherwise. Check RET value is NULL here and jump over the
+ BASE reassignment in this case. Otherwise, reassign BASE to RET. */
+ int very_unlikely = REG_BR_PROB_BASE / 2000 - 1;
+ emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
+ VOIDmode, 0, lab, very_unlikely);
ret = convert_memory_address (Pmode, ret);
emit_move_insn (base, ret);
emit_label (lab);
to be an array of such vars, putting padding in there
breaks this assumption. */
|| (DECL_SECTION_NAME (decl) != NULL
- && !symtab_node::get (decl)->implicit_section)
+ && !symtab_node::get (decl)->implicit_section
+ && !section_sanitized_p (DECL_SECTION_NAME (decl)))
|| DECL_SIZE (decl) == 0
|| ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
|| !valid_constant_size_p (DECL_SIZE_UNIT (decl))
{
tree t, uintptr_type = TREE_TYPE (base_addr);
tree shadow_type = TREE_TYPE (shadow_ptr_type);
- gimple g;
+ gimple *g;
t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
{
if (TREE_CODE (base) == SSA_NAME)
return base;
- gimple g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
+ gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
TREE_CODE (base), base);
gimple_set_location (g, loc);
if (before_p)
{
if (ptrofftype_p (len))
return len;
- gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
+ gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
NOP_EXPR, len);
gimple_set_location (g, loc);
if (before_p)
bool is_scalar_access, unsigned int align = 0)
{
gimple_stmt_iterator gsi = *iter;
- gimple g;
+ gimple *g;
gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
HOST_WIDE_INT bitsize, bitpos;
tree offset;
machine_mode mode;
- int volatilep = 0, unsignedp = 0;
- tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep, false);
+ int unsignedp, reversep, volatilep = 0;
+ tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
+ &unsignedp, &reversep, &volatilep, false);
if (TREE_CODE (t) == COMPONENT_REF
&& DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
static bool
maybe_instrument_assignment (gimple_stmt_iterator *iter)
{
- gimple s = gsi_stmt (*iter);
+ gimple *s = gsi_stmt (*iter);
gcc_assert (gimple_assign_single_p (s));
is_store);
is_instrumented = true;
}
-
+
if (gimple_assign_load_p (s))
{
ref_expr = gimple_assign_rhs1 (s);
static bool
maybe_instrument_call (gimple_stmt_iterator *iter)
{
- gimple stmt = gsi_stmt (*iter);
+ gimple *stmt = gsi_stmt (*iter);
bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
if (is_builtin && instrument_builtin_call (iter))
}
}
tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
- gimple g = gimple_build_call (decl, 0);
+ gimple *g = gimple_build_call (decl, 0);
gimple_set_location (g, gimple_location (stmt));
gsi_insert_before (iter, g, GSI_SAME_STMT);
}
for (i = gsi_start_bb (bb); !gsi_end_p (i);)
{
- gimple s = gsi_stmt (i);
+ gimple *s = gsi_stmt (i);
if (has_stmt_been_instrumented_p (s))
gsi_next (&i);
{
tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
+ fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
+ append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
}
FOR_EACH_DEFINED_VARIABLE (vnode)
if (TREE_ASM_WRITTEN (vnode->decl)
bool
asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
{
- gimple g = gsi_stmt (*iter);
+ gimple *g = gsi_stmt (*iter);
location_t loc = gimple_location (g);
-
- bool recover_p
- = (flag_sanitize & flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
+ bool recover_p;
+ if (flag_sanitize & SANITIZE_USER_ADDRESS)
+ recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
+ else
+ recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
gcc_assert (flags < ASAN_CHECK_LAST);
if (use_calls)
{
/* Instrument using callbacks. */
- gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
+ gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
NOP_EXPR, base);
gimple_set_location (g, loc);
gsi_insert_before (iter, g, GSI_SAME_STMT);
& ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow). */
tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
shadow_ptr_type);
- gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
+ gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
gimple_seq seq = NULL;
gimple_seq_add_stmt (&seq, shadow_test);
/* Aligned (>= 8 bytes) can test just
tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
shadow_ptr_type);
- gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
+ gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
gimple_seq seq = NULL;
gimple_seq_add_stmt (&seq, shadow_test);
gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,