#include "langhooks.h"
#include "cfgloop.h"
#include "gimple-builder.h"
+#include "gimple-fold.h"
#include "ubsan.h"
#include "params.h"
#include "builtins.h"
static unsigned HOST_WIDE_INT asan_shadow_offset_value;
static bool asan_shadow_offset_computed;
static vec<char *> sanitized_sections;
+static tree last_alloca_addr;
/* Set of variable declarations that are going to be guarded by
use-after-scope sanitizer. */
return true;
}
+/* Return the variable that tracks the address of the last dynamic alloca,
+ creating it on first use. */
+
+static tree
+get_last_alloca_addr ()
+{
+ if (last_alloca_addr)
+ return last_alloca_addr;
+
+ last_alloca_addr = create_tmp_reg (ptr_type_node, "last_alloca_addr");
+ gassign *g = gimple_build_assign (last_alloca_addr, null_pointer_node);
+ edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
+ gsi_insert_on_edge_immediate (e, g);
+ return last_alloca_addr;
+}
+
+/* Insert an __asan_allocas_unpoison (top, bottom) call after
+ the __builtin_stack_restore (new_sp) call.
+ The resulting code should look like this:
+ __builtin_stack_restore (new_sp);
+ top = last_alloca_addr;
+ bot = new_sp;
+ __asan_allocas_unpoison (top, bot);
+ last_alloca_addr = new_sp;
+ In general, we can't use new_sp as the bot parameter because on some
+ architectures SP has a non-zero offset from the dynamic stack area. Moreover,
+ on some architectures this offset (STACK_DYNAMIC_OFFSET) becomes known for
+ each particular function only after all callees were expanded to RTL.
+ The most noticeable example is PowerPC{,64}, see
+ http://refspecs.linuxfoundation.org/ELF/ppc64/PPC-elf64abi.html#DYNAM-STACK.
+ To overcome the issue we use the following trick: pass new_sp as a second
+ parameter to __asan_allocas_unpoison and rewrite it with
+ virtual_stack_dynamic_rtx later, during expansion, in the
+ expand_asan_emit_allocas_unpoison function.
+*/
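+
+/* For illustration only (names are made up): for C source such as
+ { char buf[n]; use (buf); }
+ the gimplifier wraps the VLA's scope in a __builtin_stack_save () /
+ __builtin_stack_restore () pair, so the routine below turns the scope exit
+ into
+ __builtin_stack_restore (saved_sp_1);
+ __asan_allocas_unpoison (last_alloca_addr, saved_sp_1);
+ last_alloca_addr = saved_sp_1;
+ unpoisoning the shadow of every alloca freed by the restore. */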
+
+static void
+handle_builtin_stack_restore (gcall *call, gimple_stmt_iterator *iter)
+{
+ if (!iter)
+ return;
+
+ tree last_alloca = get_last_alloca_addr ();
+ tree restored_stack = gimple_call_arg (call, 0);
+ tree fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCAS_UNPOISON);
+ gimple *g = gimple_build_call (fn, 2, last_alloca, restored_stack);
+ gsi_insert_after (iter, g, GSI_NEW_STMT);
+ g = gimple_build_assign (last_alloca, restored_stack);
+ gsi_insert_after (iter, g, GSI_NEW_STMT);
+}
+
+/* Deploy and poison redzones around a __builtin_alloca call. To do this, we
+ replace the call with another one with changed parameters and replace all
+ of its uses with the new address, so
+ addr = __builtin_alloca (old_size, align);
+ is replaced by
+ left_redzone_size = max (align, ASAN_RED_ZONE_SIZE);
+ The following two statements are optimized out if we know that
+ old_size & (ASAN_RED_ZONE_SIZE - 1) == 0, i.e. the alloca doesn't need a
+ partial redzone.
+ misalign = old_size & (ASAN_RED_ZONE_SIZE - 1);
+ partial_redzone_size = ASAN_RED_ZONE_SIZE - misalign;
+ right_redzone_size = ASAN_RED_ZONE_SIZE;
+ additional_size = left_redzone_size + partial_redzone_size +
+ right_redzone_size;
+ new_size = old_size + additional_size;
+ new_alloca = __builtin_alloca (new_size, max (align, 32));
+ __asan_alloca_poison (new_alloca, old_size);
+ addr = new_alloca + max (align, ASAN_RED_ZONE_SIZE);
+ last_alloca_addr = new_alloca;
+ ADDITIONAL_SIZE is added to make the new memory allocation contain not only
+ the requested memory, but also the left, partial and right redzones, as well
+ as some extra space required by alignment. */
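+
+/* A worked example, for illustration only (assuming ASAN_RED_ZONE_SIZE == 32
+ and a plain alloca with no explicit alignment):
+ p = __builtin_alloca (100);
+ becomes, with the constants folded,
+ _1 = __builtin_alloca_with_align (192, 256);
+ _2 = _1 + 32;
+ __asan_alloca_poison (_2, 100);
+ last_alloca_addr = _1;
+ p = _2;
+ i.e. a 32-byte left redzone, 100 user bytes, a 28-byte partial redzone and a
+ 32-byte right redzone (32 + 100 + 28 + 32 == 192); the second argument of
+ __builtin_alloca_with_align is an alignment in bits. */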
+
+static void
+handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter)
+{
+ if (!iter)
+ return;
+
+ gassign *g;
+ gcall *gg;
+ const HOST_WIDE_INT redzone_mask = ASAN_RED_ZONE_SIZE - 1;
+
+ tree last_alloca = get_last_alloca_addr ();
+ tree callee = gimple_call_fndecl (call);
+ tree old_size = gimple_call_arg (call, 0);
+ tree ptr_type = gimple_call_lhs (call) ? TREE_TYPE (gimple_call_lhs (call))
+ : ptr_type_node;
+ tree partial_size = NULL_TREE;
+ bool alloca_with_align
+ = DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN;
+ unsigned int align
+ = alloca_with_align ? tree_to_uhwi (gimple_call_arg (call, 1)) : 0;
+
+ /* If ALIGN > ASAN_RED_ZONE_SIZE, we embed the left redzone into the first
+ ALIGN bytes of the allocated space. Otherwise, align the alloca to
+ ASAN_RED_ZONE_SIZE manually. */
+ align = MAX (align, ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
+
+ tree alloca_rz_mask = build_int_cst (size_type_node, redzone_mask);
+ tree redzone_size = build_int_cst (size_type_node, ASAN_RED_ZONE_SIZE);
+
+ /* Extract lower bits from old_size. */
+ wide_int size_nonzero_bits = get_nonzero_bits (old_size);
+ wide_int rz_mask
+ = wi::uhwi (redzone_mask, wi::get_precision (size_nonzero_bits));
+ wide_int old_size_lower_bits = wi::bit_and (size_nonzero_bits, rz_mask);
+
+ /* If the alloca size is aligned to ASAN_RED_ZONE_SIZE, we don't need a
+ partial redzone. Otherwise, compute its size here. */
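+ /* E.g. (illustration, assuming ASAN_RED_ZONE_SIZE == 32): old_size == 100
+ gives misalign == 4 and partial_size == 28, padding the request up to the
+ next 32-byte boundary. */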
+ if (wi::ne_p (old_size_lower_bits, 0))
+ {
+ /* misalign = size & (ASAN_RED_ZONE_SIZE - 1)
+ partial_size = ASAN_RED_ZONE_SIZE - misalign. */
+ g = gimple_build_assign (make_ssa_name (size_type_node, NULL),
+ BIT_AND_EXPR, old_size, alloca_rz_mask);
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+ tree misalign = gimple_assign_lhs (g);
+ g = gimple_build_assign (make_ssa_name (size_type_node, NULL), MINUS_EXPR,
+ redzone_size, misalign);
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+ partial_size = gimple_assign_lhs (g);
+ }
+
+ /* additional_size = align + ASAN_RED_ZONE_SIZE. */
+ tree additional_size = build_int_cst (size_type_node, align / BITS_PER_UNIT
+ + ASAN_RED_ZONE_SIZE);
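+ /* E.g. for a plain alloca (and assuming ASAN_RED_ZONE_SIZE == 32), ALIGN was
+ bumped to 256 bits above, so this starts at 32 + 32 == 64 bytes before any
+ partial redzone is added. */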
+ /* If the alloca has a partial redzone, include it in additional_size too. */
+ if (partial_size)
+ {
+ /* additional_size += partial_size. */
+ g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR,
+ partial_size, additional_size);
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+ additional_size = gimple_assign_lhs (g);
+ }
+
+ /* new_size = old_size + additional_size. */
+ g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR, old_size,
+ additional_size);
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+ tree new_size = gimple_assign_lhs (g);
+
+ /* Build the new __builtin_alloca_with_align call:
+ new_alloca_with_rz = __builtin_alloca_with_align (new_size, align). */
+ tree fn = builtin_decl_implicit (BUILT_IN_ALLOCA_WITH_ALIGN);
+ gg = gimple_build_call (fn, 2, new_size,
+ build_int_cst (size_type_node, align));
+ tree new_alloca_with_rz = make_ssa_name (ptr_type, gg);
+ gimple_call_set_lhs (gg, new_alloca_with_rz);
+ gsi_insert_before (iter, gg, GSI_SAME_STMT);
+
+ /* new_alloca = new_alloca_with_rz + align. */
+ g = gimple_build_assign (make_ssa_name (ptr_type), POINTER_PLUS_EXPR,
+ new_alloca_with_rz,
+ build_int_cst (size_type_node,
+ align / BITS_PER_UNIT));
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+ tree new_alloca = gimple_assign_lhs (g);
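+ /* NEW_ALLOCA now points just past the left redzone, i.e. at the first byte
+ of user-visible memory; since ALIGN >= ASAN_RED_ZONE_SIZE * BITS_PER_UNIT,
+ it keeps whatever alignment the original call requested. */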
+
+ /* Poison newly created alloca redzones:
+ __asan_alloca_poison (new_alloca, old_size). */
+ fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCA_POISON);
+ gg = gimple_build_call (fn, 2, new_alloca, old_size);
+ gsi_insert_before (iter, gg, GSI_SAME_STMT);
+
+ /* Save the new_alloca_with_rz value into last_alloca so it can be used
+ during subsequent allocas unpoisoning. */
+ g = gimple_build_assign (last_alloca, new_alloca_with_rz);
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+
+ /* Finally, replace old alloca ptr with NEW_ALLOCA. */
+ replace_call_with_value (iter, new_alloca);
+}
+
/* Return the memory references contained in a gimple statement
representing a builtin call that has to do with memory access. */
static bool
-get_mem_refs_of_builtin_call (const gcall *call,
+get_mem_refs_of_builtin_call (gcall *call,
asan_mem_ref *src0,
tree *src0_len,
bool *src0_is_store,
tree *dst_len,
bool *dst_is_store,
bool *dest_is_deref,
- bool *intercepted_p)
+ bool *intercepted_p,
+ gimple_stmt_iterator *iter = NULL)
{
gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
len = gimple_call_lhs (call);
break;
+ case BUILT_IN_STACK_RESTORE:
+ handle_builtin_stack_restore (call, iter);
+ break;
+
+ case BUILT_IN_ALLOCA_WITH_ALIGN:
+ case BUILT_IN_ALLOCA:
+ handle_builtin_alloca (call, iter);
+ break;
/* And now the __atomic* and __sync builtins.
These are handled differently from the classical memory memory
access builtins above. */
return insns;
}
+/* Emit an __asan_allocas_unpoison (top, bot) call; virtual_stack_dynamic_rtx
+ is used for TOP, and BOT gives the other end of the dynamic alloca area to
+ unpoison. BEFORE, if non-NULL, is an existing instruction sequence to append
+ the emitted call to; otherwise a new sequence is started. */
+
+rtx_insn *
+asan_emit_allocas_unpoison (rtx top, rtx bot, rtx_insn *before)
+{
+ if (before)
+ push_to_sequence (before);
+ else
+ start_sequence ();
+ rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
+ ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2, top,
+ TYPE_MODE (pointer_sized_int_node), bot,
+ TYPE_MODE (pointer_sized_int_node));
+
+ do_pending_stack_adjust ();
+ rtx_insn *insns = get_insns ();
+ end_sequence ();
+ return insns;
+}
+
/* Return true if DECL, a global var, might be overridden and needs
therefore a local alias. */
&src0, &src0_len, &src0_is_store,
&src1, &src1_len, &src1_is_store,
&dest, &dest_len, &dest_is_store,
- &dest_is_deref, &intercepted_p))
+ &dest_is_deref, &intercepted_p, iter))
{
if (dest_is_deref)
{
if (shadow_ptr_types[0] == NULL_TREE)
asan_init_shadow_ptr_types ();
transform_statements ();
+ last_alloca_addr = NULL_TREE;
return 0;
}