+2016-11-07 Martin Liska <mliska@suse.cz>
+
+ * asan.c (enum asan_check_flags): Move the enum to header file.
+ (asan_init_shadow_ptr_types): Make type creation more generic.
+ (shadow_mem_size): New function.
+ (asan_emit_stack_protection): Use newly added ASAN_SHADOW_GRANULARITY.
+ Rewritten stack unpoisoning code.
+ (build_shadow_mem_access): Add new argument return_address.
+ (instrument_derefs): Instrument local variables if use after scope
+ sanitization is enabled.
+ (asan_store_shadow_bytes): New function.
+ (asan_expand_mark_ifn): Likewise.
+ (asan_sanitize_stack_p): Moved here from cfgexpand.c.
+ * asan.h (enum asan_mark_flags): Moved here from asan.c
+ (asan_protect_stack_decl): Protect all declarations that need
+ to live in memory.
+ (asan_sanitize_use_after_scope): New function.
+ (asan_no_sanitize_address_p): Likewise.
+ * cfgexpand.c (partition_stack_vars): Consider
+ asan_sanitize_use_after_scope in condition.
+ (expand_stack_vars): Likewise.
+ * common.opt (-fsanitize-address-use-after-scope): New option.
+ * doc/invoke.texi (use-after-scope-direct-emission-threshold):
+ Explain the parameter.
+ * flag-types.h (enum sanitize_code): Define SANITIZE_USE_AFTER_SCOPE.
+ * gimplify.c (build_asan_poison_call_expr): New function.
+ (asan_poison_variable): Likewise.
+ (gimplify_bind_expr): Generate poisoning/unpoisoning for local
+ variables that have address taken.
+ (gimplify_decl_expr): Likewise.
+ (gimplify_target_expr): Likewise for C++ temporaries.
+ (sort_by_decl_uid): New function.
+ (gimplify_expr): Unpoison all variables for a label we can jump
+ from outside of a scope.
+ (gimplify_switch_expr): Unpoison variables defined in the switch
+ context.
+ (gimplify_function_tree): Clear asan_poisoned_variables.
+ (asan_poison_variables): New function.
+ (warn_switch_unreachable_r): Handle IFN_ASAN_MARK.
+ * internal-fn.c (expand_ASAN_MARK): New function.
+ * internal-fn.def (ASAN_MARK): Declare.
+ * opts.c (finish_options): Handle -fstack-reuse if
+ -fsanitize-address-use-after-scope is enabled.
+ (common_handle_option): Enable address sanitization if
+ -fsanitize-address-use-after-scope is enabled.
+ * params.def (PARAM_USE_AFTER_SCOPE_DIRECT_EMISSION_THRESHOLD):
+ New parameter.
+ * params.h: Likewise.
+ * sancov.c (pass_sanopt::execute): Handle IFN_ASAN_MARK.
+ * sanitizer.def: Define __asan_poison_stack_memory and
+ __asan_unpoison_stack_memory functions.
+ * asan.c (asan_mark_poison_p): New function.
+ (transform_statements): Handle asan_mark_poison_p calls.
+ * gimple.c (nonfreeing_call_p): Handle IFN_ASAN_MARK.
+
2016-11-07 Tamar Christina <tamar.christina@arm.com>
PR driver/78196
static bool asan_shadow_offset_computed;
static vec<char *> sanitized_sections;
+/* Return true if STMT is an ASAN_MARK poisoning internal function call. */
+static inline bool
+asan_mark_poison_p (gimple *stmt)
+{
+ return (gimple_call_internal_p (stmt, IFN_ASAN_MARK)
+ && tree_to_uhwi (gimple_call_arg (stmt, 0)) == ASAN_MARK_CLOBBER);
+
+}
+
+/* Set of variable declarations that are going to be guarded by
+ use-after-scope sanitizer. */
+
+static hash_set<tree> *asan_handled_variables = NULL;
+
+hash_set <tree> *asan_used_labels = NULL;
+
/* Sets shadow offset to value in string VAL. */
bool
}
}
+bool
+asan_sanitize_stack_p (void)
+{
+ return ((flag_sanitize & SANITIZE_ADDRESS)
+ && ASAN_STACK
+ && !asan_no_sanitize_address_p ());
+}
+
/* Checks whether section SEC should be sanitized. */
static bool
alias_set_type asan_shadow_set = -1;
-/* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
+/* Pointer types to 1, 2 or 4 byte integers in shadow memory. A separate
alias set is used for all shadow memory accesses. */
-static GTY(()) tree shadow_ptr_types[2];
+static GTY(()) tree shadow_ptr_types[3];
/* Decl for __asan_option_detect_stack_use_after_return. */
static GTY(()) tree asan_detect_stack_use_after_return;
-/* Various flags for Asan builtins. */
-enum asan_check_flags
-{
- ASAN_CHECK_STORE = 1 << 0,
- ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
- ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
- ASAN_CHECK_LAST = 1 << 3
-};
-
/* Hashtable support for memory references used by gimple
statements. */
asan_init_shadow_ptr_types (void)
{
asan_shadow_set = new_alias_set ();
- shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
- TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
- shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
- shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
- TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
- shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
+ tree types[3] = { signed_char_type_node, short_integer_type_node,
+ integer_type_node };
+
+ for (unsigned i = 0; i < 3; i++)
+ {
+ shadow_ptr_types[i] = build_distinct_type_copy (types[i]);
+ TYPE_ALIAS_SET (shadow_ptr_types[i]) = asan_shadow_set;
+ shadow_ptr_types[i] = build_pointer_type (shadow_ptr_types[i]);
+ }
+
initialize_sanitizer_builtins ();
}
current_function_funcdef_no);
}
+/* Return number of shadow bytes that are occupied by a local variable
+ of SIZE bytes. */
+
+static unsigned HOST_WIDE_INT
+shadow_mem_size (unsigned HOST_WIDE_INT size)
+{
+ return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
+}
+
/* Insert code to protect stack vars. The prologue sequence should be emitted
directly, epilogue sequence returned. BASE is the register holding the
stack base, against which OFFSETS array offsets are relative to, OFFSETS
HOST_WIDE_INT base_offset = offsets[length - 1];
HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
- HOST_WIDE_INT last_offset, last_size;
+ HOST_WIDE_INT last_offset;
int l;
unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
tree str_cst, decl, id;
(aoff - prev_offset)
>> ASAN_SHADOW_SHIFT);
prev_offset = aoff;
- for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
+ for (i = 0; i < 4; i++, aoff += ASAN_SHADOW_GRANULARITY)
if (aoff < offset)
{
- if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
+ if (aoff < offset - (HOST_WIDE_INT)ASAN_SHADOW_GRANULARITY + 1)
shadow_bytes[i] = 0;
else
shadow_bytes[i] = offset - aoff;
if (STRICT_ALIGNMENT)
set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
- prev_offset = base_offset;
+ /* Unpoison shadow memory of a stack at the very end of a function.
+ As we're poisoning stack variables at the end of their scope,
+ shadow memory must be properly unpoisoned here. The easiest approach
+ would be to collect all variables that should not be unpoisoned and
+ we unpoison shadow memory of the whole stack except ranges
+ occupied by these variables. */
last_offset = base_offset;
- last_size = 0;
- for (l = length; l; l -= 2)
+ HOST_WIDE_INT current_offset = last_offset;
+ if (length)
{
- offset = base_offset + ((offsets[l - 1] - base_offset)
- & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
- if (last_offset + last_size != offset)
+ HOST_WIDE_INT var_end_offset = 0;
+ HOST_WIDE_INT stack_start = offsets[length - 1];
+ gcc_assert (last_offset == stack_start);
+
+ for (int l = length - 2; l > 0; l -= 2)
{
- shadow_mem = adjust_address (shadow_mem, VOIDmode,
- (last_offset - prev_offset)
- >> ASAN_SHADOW_SHIFT);
- prev_offset = last_offset;
- asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
- last_offset = offset;
- last_size = 0;
+ HOST_WIDE_INT var_offset = offsets[l];
+ current_offset = var_offset;
+ var_end_offset = offsets[l - 1];
+ HOST_WIDE_INT rounded_size = ROUND_UP (var_end_offset - var_offset,
+ BITS_PER_UNIT);
+
+ /* Should we unpoison the variable? */
+ if (asan_handled_variables != NULL
+ && asan_handled_variables->contains (decl))
+ {
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ const char *n = (DECL_NAME (decl)
+ ? IDENTIFIER_POINTER (DECL_NAME (decl))
+ : "<unknown>");
+ fprintf (dump_file, "Unpoisoning shadow stack for variable: "
+ "%s (%" PRId64 "B)\n", n,
+ var_end_offset - var_offset);
+ }
+
+ unsigned HOST_WIDE_INT s
+ = shadow_mem_size (current_offset - last_offset);
+ asan_clear_shadow (shadow_mem, s);
+ HOST_WIDE_INT shift
+ = shadow_mem_size (current_offset - last_offset + rounded_size);
+ shadow_mem = adjust_address (shadow_mem, VOIDmode, shift);
+ last_offset = var_offset + rounded_size;
+ current_offset = last_offset;
+ }
+
}
- last_size += base_offset + ((offsets[l - 2] - base_offset)
- & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
- - offset;
- }
- if (last_size)
- {
- shadow_mem = adjust_address (shadow_mem, VOIDmode,
- (last_offset - prev_offset)
- >> ASAN_SHADOW_SHIFT);
- asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
+
+ /* Handle last redzone. */
+ current_offset = offsets[0];
+ asan_clear_shadow (shadow_mem,
+ shadow_mem_size (current_offset - last_offset));
}
+ /* Clean-up set with instrumented stack variables. */
+ delete asan_handled_variables;
+ asan_handled_variables = NULL;
+ delete asan_used_labels;
+ asan_used_labels = NULL;
+
do_pending_stack_adjust ();
if (lab)
emit_label (lab);
gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}
-/* Build
- (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset (). */
+/* Build (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().
+ If RETURN_ADDRESS is set to true, return the memory location instead
+ of the value in the shadow memory. */
static tree
build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
- tree base_addr, tree shadow_ptr_type)
+ tree base_addr, tree shadow_ptr_type,
+ bool return_address = false)
{
tree t, uintptr_type = TREE_TYPE (base_addr);
tree shadow_type = TREE_TYPE (shadow_ptr_type);
gimple_set_location (g, location);
gsi_insert_after (gsi, g, GSI_NEW_STMT);
- t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
- build_int_cst (shadow_ptr_type, 0));
- g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
- gimple_set_location (g, location);
- gsi_insert_after (gsi, g, GSI_NEW_STMT);
+ if (!return_address)
+ {
+ t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
+ build_int_cst (shadow_ptr_type, 0));
+ g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
+ gimple_set_location (g, location);
+ gsi_insert_after (gsi, g, GSI_NEW_STMT);
+ }
+
return gimple_assign_lhs (g);
}
{
/* Automatic vars in the current function will be always
accessible. */
- if (decl_function_context (inner) == current_function_decl)
+ if (decl_function_context (inner) == current_function_decl
+ && (!asan_sanitize_use_after_scope ()
+ || !TREE_ADDRESSABLE (inner)))
return;
}
/* Always instrument external vars, they might be dynamically
If the current instruction is a function call that
might free something, let's forget about the memory
references that got instrumented. Otherwise we might
- miss some instrumentation opportunities. */
- if (is_gimple_call (s) && !nonfreeing_call_p (s))
+ miss some instrumentation opportunities. Do the same
+ for an ASAN_MARK poisoning internal function. */
+ if (is_gimple_call (s)
+ && (!nonfreeing_call_p (s) || asan_mark_poison_p (s)))
empty_mem_ref_hash_table ();
gsi_next (&i);
flag_sanitize |= SANITIZE_ADDRESS;
}
+/* Poison or unpoison (depending on IS_CLOBBER variable) shadow memory based
+ on SHADOW address. Newly added statements will be added to ITER with
+ given location LOC. We mark SIZE bytes in shadow memory, where
+ LAST_CHUNK_SIZE is greater than zero in situation where we are at the
+ end of a variable. */
+
+static void
+asan_store_shadow_bytes (gimple_stmt_iterator *iter, location_t loc,
+ tree shadow,
+ unsigned HOST_WIDE_INT base_addr_offset,
+ bool is_clobber, unsigned size,
+ unsigned last_chunk_size)
+{
+ tree shadow_ptr_type;
+
+ switch (size)
+ {
+ case 1:
+ shadow_ptr_type = shadow_ptr_types[0];
+ break;
+ case 2:
+ shadow_ptr_type = shadow_ptr_types[1];
+ break;
+ case 4:
+ shadow_ptr_type = shadow_ptr_types[2];
+ break;
+ default:
+ gcc_unreachable ();
+ }
+
+ unsigned char c = (char) is_clobber ? ASAN_STACK_MAGIC_USE_AFTER_SCOPE : 0;
+ unsigned HOST_WIDE_INT val = 0;
+ for (unsigned i = 0; i < size; ++i)
+ {
+ unsigned char shadow_c = c;
+ if (i == size - 1 && last_chunk_size && !is_clobber)
+ shadow_c = last_chunk_size;
+ val |= (unsigned HOST_WIDE_INT) shadow_c << (BITS_PER_UNIT * i);
+ }
+
+ /* Handle last chunk in unpoisoning. */
+ tree magic = build_int_cst (TREE_TYPE (shadow_ptr_type), val);
+
+ tree dest = build2 (MEM_REF, TREE_TYPE (shadow_ptr_type), shadow,
+ build_int_cst (shadow_ptr_type, base_addr_offset));
+
+ gimple *g = gimple_build_assign (dest, magic);
+ gimple_set_location (g, loc);
+ gsi_insert_after (iter, g, GSI_NEW_STMT);
+}
+
+/* Expand the ASAN_MARK builtins. */
+
+bool
+asan_expand_mark_ifn (gimple_stmt_iterator *iter)
+{
+ gimple *g = gsi_stmt (*iter);
+ location_t loc = gimple_location (g);
+ HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
+ gcc_assert (flags < ASAN_MARK_LAST);
+ bool is_clobber = (flags & ASAN_MARK_CLOBBER) != 0;
+
+ tree base = gimple_call_arg (g, 1);
+ gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
+ tree decl = TREE_OPERAND (base, 0);
+ gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
+ if (asan_handled_variables == NULL)
+ asan_handled_variables = new hash_set<tree> (16);
+ asan_handled_variables->add (decl);
+ tree len = gimple_call_arg (g, 2);
+
+ gcc_assert (tree_fits_shwi_p (len));
+ unsigned HOST_WIDE_INT size_in_bytes = tree_to_shwi (len);
+ gcc_assert (size_in_bytes);
+
+ g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
+ NOP_EXPR, base);
+ gimple_set_location (g, loc);
+ gsi_replace (iter, g, false);
+ tree base_addr = gimple_assign_lhs (g);
+
+ /* Generate direct emission if size_in_bytes is small. */
+ if (size_in_bytes <= ASAN_PARAM_USE_AFTER_SCOPE_DIRECT_EMISSION_THRESHOLD)
+ {
+ unsigned HOST_WIDE_INT shadow_size = shadow_mem_size (size_in_bytes);
+
+ tree shadow = build_shadow_mem_access (iter, loc, base_addr,
+ shadow_ptr_types[0], true);
+
+ for (unsigned HOST_WIDE_INT offset = 0; offset < shadow_size;)
+ {
+ unsigned size = 1;
+ if (shadow_size - offset >= 4)
+ size = 4;
+ else if (shadow_size - offset >= 2)
+ size = 2;
+
+ unsigned HOST_WIDE_INT last_chunk_size = 0;
+ unsigned HOST_WIDE_INT s = (offset + size) * ASAN_SHADOW_GRANULARITY;
+ if (s > size_in_bytes)
+ last_chunk_size = ASAN_SHADOW_GRANULARITY - (s - size_in_bytes);
+
+ asan_store_shadow_bytes (iter, loc, shadow, offset, is_clobber,
+ size, last_chunk_size);
+ offset += size;
+ }
+ }
+ else
+ {
+ g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
+ NOP_EXPR, len);
+ gimple_set_location (g, loc);
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+ tree sz_arg = gimple_assign_lhs (g);
+
+ tree fun = builtin_decl_implicit (is_clobber ? BUILT_IN_ASAN_CLOBBER_N
+ : BUILT_IN_ASAN_UNCLOBBER_N);
+ g = gimple_build_call (fun, 2, base_addr, sz_arg);
+ gimple_set_location (g, loc);
+ gsi_insert_after (iter, g, GSI_NEW_STMT);
+ }
+
+ return false;
+}
+
/* Expand the ASAN_{LOAD,STORE} builtins. */
bool
extern void initialize_sanitizer_builtins (void);
extern tree asan_dynamic_init_call (bool);
extern bool asan_expand_check_ifn (gimple_stmt_iterator *, bool);
+extern bool asan_expand_mark_ifn (gimple_stmt_iterator *);
extern gimple_stmt_iterator create_cond_insert_point
(gimple_stmt_iterator *, bool, bool, bool, basic_block *, basic_block *);
/* Alias set for accessing the shadow memory. */
extern alias_set_type asan_shadow_set;
+/* Hash set of labels that are either used in a goto, or their address
+ has been taken. */
+extern hash_set <tree> *asan_used_labels;
+
/* Shadow memory is found at
(address >> ASAN_SHADOW_SHIFT) + asan_shadow_offset (). */
#define ASAN_SHADOW_SHIFT 3
+#define ASAN_SHADOW_GRANULARITY (1UL << ASAN_SHADOW_SHIFT)
/* Red zone size, stack and global variables are padded by ASAN_RED_ZONE_SIZE
up to 2 * ASAN_RED_ZONE_SIZE - 1 bytes. */
the frame. Middle is for padding in between variables, right is
above the last protected variable and partial immediately after variables
up to ASAN_RED_ZONE_SIZE alignment. */
-#define ASAN_STACK_MAGIC_LEFT 0xf1
-#define ASAN_STACK_MAGIC_MIDDLE 0xf2
-#define ASAN_STACK_MAGIC_RIGHT 0xf3
-#define ASAN_STACK_MAGIC_PARTIAL 0xf4
-#define ASAN_STACK_MAGIC_USE_AFTER_RET 0xf5
+#define ASAN_STACK_MAGIC_LEFT 0xf1
+#define ASAN_STACK_MAGIC_MIDDLE 0xf2
+#define ASAN_STACK_MAGIC_RIGHT 0xf3
+#define ASAN_STACK_MAGIC_PARTIAL 0xf4
+#define ASAN_STACK_MAGIC_USE_AFTER_RET 0xf5
+#define ASAN_STACK_MAGIC_USE_AFTER_SCOPE 0xf8
#define ASAN_STACK_FRAME_MAGIC 0x41b58ab3
#define ASAN_STACK_RETIRED_MAGIC 0x45e0360e
-/* Return true if DECL should be guarded on the stack. */
-
-static inline bool
-asan_protect_stack_decl (tree decl)
+/* Various flags for Asan builtins. */
+enum asan_check_flags
{
- return DECL_P (decl) && !DECL_ARTIFICIAL (decl);
-}
+ ASAN_CHECK_STORE = 1 << 0,
+ ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
+ ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
+ ASAN_CHECK_LAST = 1 << 3
+};
+
+/* Flags for Asan mark builtins. */
+enum asan_mark_flags
+{
+ ASAN_MARK_CLOBBER = 1 << 0,
+ ASAN_MARK_UNCLOBBER = 1 << 1,
+ ASAN_MARK_LAST = 1 << 2
+};
/* Return the size of padding needed to insert after a protected
decl of SIZE. */
extern void set_sanitized_sections (const char *);
+extern bool asan_sanitize_stack_p (void);
+
/* Return TRUE if builtin with given FCODE will be intercepted by
libasan. */
|| fcode == BUILT_IN_STRNCMP
|| fcode == BUILT_IN_STRNCPY;
}
+
+/* Return TRUE if we should instrument for use-after-scope sanity checking. */
+
+static inline bool
+asan_sanitize_use_after_scope (void)
+{
+ return (flag_sanitize_address_use_after_scope && asan_sanitize_stack_p ());
+}
+
+static inline bool
+asan_no_sanitize_address_p (void)
+{
+ return lookup_attribute ("no_sanitize_address",
+ DECL_ATTRIBUTES (current_function_decl));
+}
+
+/* Return true if DECL should be guarded on the stack. */
+
+static inline bool
+asan_protect_stack_decl (tree decl)
+{
+ return DECL_P (decl)
+ && (!DECL_ARTIFICIAL (decl)
+ || (asan_sanitize_use_after_scope () && TREE_ADDRESSABLE (decl)));
+}
+
#endif /* TREE_ASAN */
+2016-11-07 Martin Liska <mliska@suse.cz>
+
+ * c-warn.c (warn_for_unused_label): Save all labels used
+ in goto or in &label.
+
2016-11-03 Jason Merrill <jason@redhat.com>
* c-cppbuiltin.c (c_cpp_builtins): Correct
#include "tm_p.h"
#include "diagnostic.h"
#include "intl.h"
-
+#include "asan.h"
/* Print a warning if a constant expression had overflow in folding.
Invoke this function on every expression that the language
else
warning (OPT_Wunused_label, "label %q+D declared but not defined", label);
}
+ else if (asan_sanitize_use_after_scope ())
+ {
+ if (asan_used_labels == NULL)
+ asan_used_labels = new hash_set<tree> (16);
+
+ asan_used_labels->add (label);
+ }
}
/* Warn for division by zero according to the value of DIVISOR. LOC
}
}
-/* Return true if the current function should have its stack frame
- protected by address sanitizer. */
-
-static inline bool
-asan_sanitize_stack_p (void)
-{
- return ((flag_sanitize & SANITIZE_ADDRESS)
- && ASAN_STACK
- && !lookup_attribute ("no_sanitize_address",
- DECL_ATTRIBUTES (current_function_decl)));
-}
-
/* A subroutine of expand_used_vars. Binpack the variables into
partitions constrained by the interference graph. The overall
algorithm used is as follows:
sizes, as the shorter vars wouldn't be adequately protected.
Don't do that for "large" (unsupported) alignment objects,
those aren't protected anyway. */
- if (asan_sanitize_stack_p () && isize != jsize
+ if ((asan_sanitize_stack_p ())
+ && isize != jsize
&& ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
break;
if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
{
base = virtual_stack_vars_rtx;
- if (asan_sanitize_stack_p () && pred)
+ if ((asan_sanitize_stack_p ())
+ && pred)
{
HOST_WIDE_INT prev_offset
= align_base (frame_offset,
Common Report
This switch is deprecated; use -fsanitize-recover= instead.
+fsanitize-address-use-after-scope
+Common Driver Report Var(flag_sanitize_address_use_after_scope) Init(0)
+
fsanitize-undefined-trap-on-error
Common Driver Report Var(flag_sanitize_undefined_trap_on_error) Init(0)
Use trap instead of a library function for undefined behavior sanitization.
E.g. to disable inline code use
@option{--param asan-instrumentation-with-call-threshold=0}.
+@item use-after-scope-direct-emission-threshold
+If the size of a local variable in bytes is smaller than or equal to this
+number, direct instruction emission is used to poison and unpoison it.
+
@item chkp-max-ctor-size
Static constructors generated by Pointer Bounds Checker may become very
large and significantly increase compile time at optimization level
Enable AddressSanitizer, a fast memory error detector.
Memory access instructions are instrumented to detect
out-of-bounds and use-after-free bugs.
+The option enables @option{-fsanitize-address-use-after-scope}.
See @uref{https://github.com/google/sanitizers/wiki/AddressSanitizer} for
more details. The run-time behavior can be influenced using the
@env{ASAN_OPTIONS} environment variable. When set to @code{help=1},
@item -fsanitize=kernel-address
@opindex fsanitize=kernel-address
Enable AddressSanitizer for Linux kernel.
+The option enables @option{-fsanitize-address-use-after-scope}.
See @uref{https://github.com/google/kasan/wiki} for more details.
@item -fsanitize=thread
@option{-fsanitize=float-cast-overflow}, @option{-fsanitize=float-divide-by-zero},
@option{-fsanitize=bounds-strict},
@option{-fsanitize=kernel-address} and @option{-fsanitize=address}.
-For these sanitizers error recovery is turned on by default, except @option{-fsanitize=address},
-for which this feature is experimental.
+For these sanitizers error recovery is turned on by default,
+except @option{-fsanitize=address}, for which this feature is experimental.
@option{-fsanitize-recover=all} and @option{-fno-sanitize-recover=all} is also
accepted, the former enables recovery for all sanitizers that support it,
the latter disables recovery for all sanitizers that support it.
-fno-sanitize-recover=undefined,float-cast-overflow,float-divide-by-zero,bounds-strict
@end smallexample
+@item -fsanitize-address-use-after-scope
+@opindex fsanitize-address-use-after-scope
+Enable sanitization of local variables to detect use-after-scope bugs.
+The option sets @option{-fstack-reuse} to @samp{none}.
+
@item -fsanitize-undefined-trap-on-error
@opindex fsanitize-undefined-trap-on-error
The @option{-fsanitize-undefined-trap-on-error} option instructs the compiler to
#include "builtins.h"
#include "selftest.h"
#include "gimple-pretty-print.h"
+#include "asan.h"
/* All the tuples have their operand vector (if present) at the very bottom
{
case IFN_ABNORMAL_DISPATCHER:
return true;
+ case IFN_ASAN_MARK:
+ return tree_to_uhwi (gimple_call_arg (call, 0)) == ASAN_MARK_UNCLOBBER;
default:
if (gimple_call_flags (call) & ECF_LEAF)
return true;
#include "gimple-walk.h"
#include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
+#include "asan.h"
+
+/* Hash set of poisoned variables in a bind expr. */
+static hash_set<tree> *asan_poisoned_variables = NULL;
enum gimplify_omp_var_data
{
tree return_temp;
vec<tree> case_labels;
+ hash_set<tree> *live_switch_vars;
/* The formal temporary table. Should this be persistent? */
hash_table<gimplify_hasher> *temp_htab;
1, tmp_var);
}
+/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of the DECL
+ variable. */
+
+static tree
+build_asan_poison_call_expr (tree decl)
+{
+ /* Do not poison variables that have size equal to zero. */
+ tree unit_size = DECL_SIZE_UNIT (decl);
+ if (zerop (unit_size))
+ return NULL_TREE;
+
+ tree base = build_fold_addr_expr (decl);
+
+ return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
+ void_type_node, 3,
+ build_int_cst (integer_type_node,
+ ASAN_MARK_CLOBBER),
+ base, unit_size);
+}
+
+/* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
+ on POISON flag, shadow memory of a DECL variable. The call will be
+ put on location identified by IT iterator, where BEFORE flag drives
+ position where the stmt will be put. */
+
+static void
+asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
+ bool before)
+{
+ /* When within an OMP context, do not emit ASAN_MARK internal fns. */
+ if (gimplify_omp_ctxp)
+ return;
+
+ tree unit_size = DECL_SIZE_UNIT (decl);
+ tree base = build_fold_addr_expr (decl);
+
+ /* Do not poison variables that have size equal to zero. */
+ if (zerop (unit_size))
+ return;
+
+ /* It's necessary to have all stack variables aligned to ASAN granularity
+ bytes. */
+ if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
+ SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
+
+ HOST_WIDE_INT flags = poison ? ASAN_MARK_CLOBBER : ASAN_MARK_UNCLOBBER;
+
+ gimple *g
+ = gimple_build_call_internal (IFN_ASAN_MARK, 3,
+ build_int_cst (integer_type_node, flags),
+ base, unit_size);
+
+ if (before)
+ gsi_insert_before (it, g, GSI_NEW_STMT);
+ else
+ gsi_insert_after (it, g, GSI_NEW_STMT);
+}
+
+/* Generate IFN_ASAN_MARK internal call that depending on POISON flag
+ either poisons or unpoisons a DECL. Created statement is appended
+ to SEQ_P gimple sequence. */
+
+static void
+asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
+{
+ gimple_stmt_iterator it = gsi_last (*seq_p);
+ bool before = false;
+
+ if (gsi_end_p (it))
+ before = true;
+
+ asan_poison_variable (decl, poison, &it, before);
+}
+
+/* Sort pair of VAR_DECLs A and B by DECL_UID. */
+
+static int
+sort_by_decl_uid (const void *a, const void *b)
+{
+ const tree *t1 = (const tree *)a;
+ const tree *t2 = (const tree *)b;
+
+ int uid1 = DECL_UID (*t1);
+ int uid2 = DECL_UID (*t2);
+
+ if (uid1 < uid2)
+ return -1;
+ else if (uid1 > uid2)
+ return 1;
+ else
+ return 0;
+}
+
+/* Generate IFN_ASAN_MARK internal call for all VARIABLES
+ depending on POISON flag. Created statement is appended
+ to SEQ_P gimple sequence. */
+
+static void
+asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
+{
+ unsigned c = variables->elements ();
+ if (c == 0)
+ return;
+
+ auto_vec<tree> sorted_variables (c);
+
+ for (hash_set<tree>::iterator it = variables->begin ();
+ it != variables->end (); ++it)
+ sorted_variables.safe_push (*it);
+
+ sorted_variables.qsort (sort_by_decl_uid);
+
+ for (unsigned i = 0; i < sorted_variables.length (); i++)
+ asan_poison_variable (sorted_variables[i], poison, seq_p);
+}
+
/* Gimplify a BIND_EXPR. Just voidify and recurse. */
static enum gimplify_status
}
}
}
+
+ if (asan_poisoned_variables != NULL
+ && asan_poisoned_variables->contains (t))
+ {
+ asan_poisoned_variables->remove (t);
+ asan_poison_variable (t, true, &cleanup);
+ }
+
+ if (gimplify_ctxp->live_switch_vars != NULL
+ && gimplify_ctxp->live_switch_vars->contains (t))
+ gimplify_ctxp->live_switch_vars->remove (t);
}
if (ret_clauses)
if (VAR_P (decl) && !DECL_EXTERNAL (decl))
{
tree init = DECL_INITIAL (decl);
+ bool is_vla = false;
if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
|| (!TREE_STATIC (decl)
&& flag_stack_check == GENERIC_STACK_CHECK
&& compare_tree_int (DECL_SIZE_UNIT (decl),
STACK_CHECK_MAX_VAR_SIZE) > 0))
- gimplify_vla_decl (decl, seq_p);
+ {
+ gimplify_vla_decl (decl, seq_p);
+ is_vla = true;
+ }
+
+ if (asan_sanitize_use_after_scope ()
+ && !asan_no_sanitize_address_p ()
+ && !is_vla
+ && TREE_ADDRESSABLE (decl)
+ && !TREE_STATIC (decl))
+ {
+ asan_poisoned_variables->add (decl);
+ asan_poison_variable (decl, false, seq_p);
+ if (gimplify_ctxp->live_switch_vars)
+ gimplify_ctxp->live_switch_vars->add (decl);
+ }
/* Some front ends do not explicitly declare all anonymous
artificial variables. We compensate here by declaring the
/* Walk the sub-statements. */
*handled_ops_p = false;
break;
+ case GIMPLE_CALL:
+ if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
+ {
+ *handled_ops_p = false;
+ break;
+ }
+ /* Fall through. */
default:
/* Save the first "real" statement (not a decl/lexical scope/...). */
wi->info = stmt;
if (find_label_entry (labels, label))
prev = gsi_stmt (*gsi_p);
}
+ else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
+ ;
else
prev = gsi_stmt (*gsi_p);
gsi_next (gsi_p);
{
vec<tree> labels;
vec<tree> saved_labels;
+ hash_set<tree> *saved_live_switch_vars;
tree default_case = NULL_TREE;
gswitch *switch_stmt;
labels. Save all the things from the switch body to append after. */
saved_labels = gimplify_ctxp->case_labels;
gimplify_ctxp->case_labels.create (8);
+ saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
+ gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
gimplify_ctxp->in_switch_expr = true;
labels = gimplify_ctxp->case_labels;
gimplify_ctxp->case_labels = saved_labels;
+ gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
+ delete gimplify_ctxp->live_switch_vars;
+ gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
preprocess_case_label_vec_for_gimple (labels, index_type,
&default_case);
tree init = TARGET_EXPR_INITIAL (targ);
enum gimplify_status ret;
+ bool unpoison_empty_seq = false;
+ gimple_stmt_iterator unpoison_it;
+
if (init)
{
tree cleanup = NULL_TREE;
gimplify_vla_decl (temp, pre_p);
}
else
- gimple_add_tmp_var (temp);
+ {
+ /* Save the location where we need to place unpoisoning. It's possible
+ that the variable will later become needs_to_live_in_memory. */
+ unpoison_it = gsi_last (*pre_p);
+ unpoison_empty_seq = gsi_end_p (unpoison_it);
+
+ gimple_add_tmp_var (temp);
+ }
/* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
expression is supposed to initialize the slot. */
/* Add a clobber for the temporary going out of scope, like
gimplify_bind_expr. */
if (gimplify_ctxp->in_cleanup_point_expr
- && needs_to_live_in_memory (temp)
- && flag_stack_reuse == SR_ALL)
- {
- tree clobber = build_constructor (TREE_TYPE (temp),
- NULL);
- TREE_THIS_VOLATILE (clobber) = true;
- clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
- if (cleanup)
- cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
- clobber);
- else
- cleanup = clobber;
- }
+ && needs_to_live_in_memory (temp))
+ {
+ if (flag_stack_reuse == SR_ALL)
+ {
+ tree clobber = build_constructor (TREE_TYPE (temp),
+ NULL);
+ TREE_THIS_VOLATILE (clobber) = true;
+ clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
+ if (cleanup)
+ cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
+ clobber);
+ else
+ cleanup = clobber;
+ }
+ if (asan_sanitize_use_after_scope ())
+ {
+ tree asan_cleanup = build_asan_poison_call_expr (temp);
+ if (asan_cleanup)
+ {
+ if (unpoison_empty_seq)
+ unpoison_it = gsi_start (*pre_p);
+ asan_poison_variable (temp, false, &unpoison_it,
+ unpoison_empty_seq);
+ gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
+ }
+ }
+ }
if (cleanup)
gimple_push_cleanup (temp, cleanup, false, pre_p);
location_t saved_location;
enum gimplify_status ret;
gimple_stmt_iterator pre_last_gsi, post_last_gsi;
+ tree label;
save_expr = *expr_p;
if (save_expr == NULL_TREE)
case LABEL_EXPR:
ret = gimplify_label_expr (expr_p, pre_p);
+ label = LABEL_EXPR_LABEL (*expr_p);
+ gcc_assert (decl_function_context (label) == current_function_decl);
+
+ /* If the label is used in a goto statement, or address of the label
+ is taken, we need to unpoison all variables that were seen so far.
+ Doing so prevents us from reporting false positives. */
+ if (asan_sanitize_use_after_scope ()
+ && asan_used_labels != NULL
+ && asan_used_labels->contains (label))
+ asan_poison_variables (asan_poisoned_variables, false, pre_p);
break;
case CASE_LABEL_EXPR:
ret = gimplify_case_label_expr (expr_p, pre_p);
+
+ if (gimplify_ctxp->live_switch_vars)
+ asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
+ pre_p);
break;
case RETURN_EXPR:
&& !needs_to_live_in_memory (ret))
DECL_GIMPLE_REG_P (ret) = 1;
+ asan_poisoned_variables = new hash_set<tree> ();
bind = gimplify_body (fndecl, true);
+ delete asan_poisoned_variables;
+ asan_poisoned_variables = NULL;
/* The tree body of the function is no longer needed, replace it
with the new GIMPLE body. */
gcc_unreachable ();
}
+/* This should get expanded in the sanopt pass. */
+
+static void
+expand_ASAN_MARK (internal_fn, gcall *)
+{
+ gcc_unreachable ();
+}
+
+
/* This should get expanded in the tsan pass. */
static void
DEF_INTERNAL_FN (ABNORMAL_DISPATCHER, ECF_NORETURN, NULL)
DEF_INTERNAL_FN (BUILTIN_EXPECT, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (ASAN_CHECK, ECF_TM_PURE | ECF_LEAF | ECF_NOTHROW, ".R...")
+DEF_INTERNAL_FN (ASAN_MARK, ECF_TM_PURE | ECF_LEAF | ECF_NOTHROW, ".R..")
DEF_INTERNAL_FN (ADD_OVERFLOW, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (SUB_OVERFLOW, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)
DEF_INTERNAL_FN (MUL_OVERFLOW, ECF_CONST | ECF_LEAF | ECF_NOTHROW, NULL)
opts->x_flag_aggressive_loop_optimizations = 0;
opts->x_flag_strict_overflow = 0;
}
+
+ /* Enable -fsanitize-address-use-after-scope if address sanitizer is
+ enabled. */
+ if (opts->x_flag_sanitize
+ && !opts_set->x_flag_sanitize_address_use_after_scope)
+ opts->x_flag_sanitize_address_use_after_scope = true;
+
+ /* Force -fstack-reuse=none in case -fsanitize-address-use-after-scope
+ is enabled. */
+ if (opts->x_flag_sanitize_address_use_after_scope)
+ {
+ if (opts->x_flag_stack_reuse != SR_NONE
+ && opts_set->x_flag_stack_reuse != SR_NONE)
+ error_at (loc,
+ "-fsanitize-address-use-after-scope requires "
+ "-fstack-reuse=none option");
+
+ opts->x_flag_stack_reuse = SR_NONE;
+ }
}
#define LEFT_COLUMN 27
{
#define SANITIZER_OPT(name, flags, recover) \
{ #name, flags, sizeof #name - 1, recover }
- SANITIZER_OPT (address, SANITIZE_ADDRESS | SANITIZE_USER_ADDRESS, true),
- SANITIZER_OPT (kernel-address, SANITIZE_ADDRESS | SANITIZE_KERNEL_ADDRESS,
+ SANITIZER_OPT (address, (SANITIZE_ADDRESS | SANITIZE_USER_ADDRESS), true),
+ SANITIZER_OPT (kernel-address, (SANITIZE_ADDRESS | SANITIZE_KERNEL_ADDRESS),
true),
SANITIZER_OPT (thread, SANITIZE_THREAD, false),
SANITIZER_OPT (leak, SANITIZE_LEAK, false),
/* Deferred. */
break;
+ case OPT_fsanitize_address_use_after_scope:
+ opts->x_flag_sanitize_address_use_after_scope = value;
+ break;
+
case OPT_fsanitize_recover:
if (value)
opts->x_flag_sanitize_recover
"in function becomes greater or equal to this number.",
7000, 0, INT_MAX)
+DEFPARAM (PARAM_USE_AFTER_SCOPE_DIRECT_EMISSION_THRESHOLD,
+ "use-after-scope-direct-emission-threshold",
+ "Use direct poisoning/unpoisoning instructions for variables "
+ "smaller or equal to this number.",
+ 256, 0, INT_MAX)
+
DEFPARAM (PARAM_UNINIT_CONTROL_DEP_ATTEMPTS,
"uninit-control-dep-attempts",
"Maximum number of nested calls to search for control dependencies "
PARAM_VALUE (PARAM_ASAN_USE_AFTER_RETURN)
#define ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD \
PARAM_VALUE (PARAM_ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD)
+#define ASAN_PARAM_USE_AFTER_SCOPE_DIRECT_EMISSION_THRESHOLD \
+ ((unsigned) PARAM_VALUE (PARAM_USE_AFTER_SCOPE_DIRECT_EMISSION_THRESHOLD))
#endif /* ! GCC_PARAMS_H */
DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_AFTER_DYNAMIC_INIT,
"__asan_after_dynamic_init",
BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_CLOBBER_N, "__asan_poison_stack_memory",
+ BT_FN_VOID_PTR_PTRMODE, 0)
+DEF_SANITIZER_BUILTIN(BUILT_IN_ASAN_UNCLOBBER_N, "__asan_unpoison_stack_memory",
+ BT_FN_VOID_PTR_PTRMODE, 0)
/* Thread Sanitizer */
DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_INIT, "__tsan_init",
case IFN_ASAN_CHECK:
no_next = asan_expand_check_ifn (&gsi, use_calls);
break;
+ case IFN_ASAN_MARK:
+ no_next = asan_expand_mark_ifn (&gsi);
+ break;
default:
break;
}