return (asan_sanitize_stack_p () && param_asan_protect_allocas);
}
+/* Should we instrument reads?  */
+bool
+asan_instrument_reads (void)
+{
+ return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_instrument_reads);
+}
+
+/* Should we instrument writes?  */
+bool
+asan_instrument_writes (void)
+{
+ return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_instrument_writes);
+}
+
+/* Should we instrument memory intrinsics (memset/memcpy/memmove)?  */
+bool
+asan_memintrin (void)
+{
+ return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_memintrin);
+}
+
+
/* Checks whether section SEC should be sanitized. */
static bool
To overcome the issue we use following trick: pass new_sp as a second
parameter to __asan_allocas_unpoison and rewrite it during expansion with
new_sp + (virtual_dynamic_stack_rtx - sp) later in
- expand_asan_emit_allocas_unpoison function. */
+ expand_asan_emit_allocas_unpoison function.
+
+ HWASAN needs to do something very similar; the eventual pseudocode should be:
+ __hwasan_tag_memory (virtual_stack_dynamic_rtx,
+ 0,
+ new_sp - sp);
+ __builtin_stack_restore (new_sp)
+
+ Need to use the same trick to handle STACK_DYNAMIC_OFFSET as described
+ above. */
static void
handle_builtin_stack_restore (gcall *call, gimple_stmt_iterator *iter)
{
- if (!iter || !asan_sanitize_allocas_p ())
+ if (!iter
+ || !(asan_sanitize_allocas_p () || hwasan_sanitize_allocas_p ()))
return;
- tree last_alloca = get_last_alloca_addr ();
tree restored_stack = gimple_call_arg (call, 0);
- tree fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCAS_UNPOISON);
- gimple *g = gimple_build_call (fn, 2, last_alloca, restored_stack);
- gsi_insert_before (iter, g, GSI_SAME_STMT);
- g = gimple_build_assign (last_alloca, restored_stack);
+
+ gimple *g;
+
+ if (hwasan_sanitize_allocas_p ())
+ {
+ enum internal_fn fn = IFN_HWASAN_ALLOCA_UNPOISON;
+ /* There is only one piece of information `expand_HWASAN_ALLOCA_UNPOISON`
+ needs to work. This is the length of the area that we're
+ deallocating. Since the stack pointer is known at expand time, the
+ position of the new stack pointer after deallocation is enough
+ information to calculate this length. */
+ g = gimple_build_call_internal (fn, 1, restored_stack);
+ }
+ else
+ {
+ tree last_alloca = get_last_alloca_addr ();
+ tree fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCAS_UNPOISON);
+ g = gimple_build_call (fn, 2, last_alloca, restored_stack);
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+ g = gimple_build_assign (last_alloca, restored_stack);
+ }
+
gsi_insert_before (iter, g, GSI_SAME_STMT);
}
static void
handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter)
{
- if (!iter || !asan_sanitize_allocas_p ())
+ if (!iter
+ || !(asan_sanitize_allocas_p () || hwasan_sanitize_allocas_p ()))
return;
gassign *g;
gcall *gg;
- const HOST_WIDE_INT redzone_mask = ASAN_RED_ZONE_SIZE - 1;
-
- tree last_alloca = get_last_alloca_addr ();
tree callee = gimple_call_fndecl (call);
tree old_size = gimple_call_arg (call, 0);
tree ptr_type = gimple_call_lhs (call) ? TREE_TYPE (gimple_call_lhs (call))
= DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
? 0 : tree_to_uhwi (gimple_call_arg (call, 1));
+ if (hwasan_sanitize_allocas_p ())
+ {
+ gimple_seq stmts = NULL;
+ location_t loc = gimple_location (gsi_stmt (*iter));
+ /*
+ HWASAN needs a different expansion.
+
+ addr = __builtin_alloca (size, align);
+
+ should be replaced by
+
+ new_size = size rounded up to HWASAN_TAG_GRANULE_SIZE byte alignment;
+ untagged_addr = __builtin_alloca (new_size, align);
+ tag = __hwasan_choose_alloca_tag ();
+ addr = ifn_HWASAN_SET_TAG (untagged_addr, tag);
+ __hwasan_tag_memory (untagged_addr, tag, new_size);
+ */
+ /* Ensure alignment at least HWASAN_TAG_GRANULE_SIZE bytes so we start on
+ a tag granule. */
+ align = align > HWASAN_TAG_GRANULE_SIZE ? align : HWASAN_TAG_GRANULE_SIZE;
+
+ tree old_size = gimple_call_arg (call, 0);
+ tree new_size = gimple_build_round_up (&stmts, loc, size_type_node,
+ old_size,
+ HWASAN_TAG_GRANULE_SIZE);
+
+ /* Make the alloca call */
+ tree untagged_addr
+ = gimple_build (&stmts, loc,
+ as_combined_fn (BUILT_IN_ALLOCA_WITH_ALIGN), ptr_type,
+ new_size, build_int_cst (size_type_node, align));
+
+ /* Choose the tag.
+ Here we use an internal function so we can choose the tag at expand
+ time. We need the decision to be made after stack variables have been
+ assigned their tag (i.e. once the hwasan_frame_tag_offset variable has
+ been set to one after the last stack variables tag). */
+ tree tag = gimple_build (&stmts, loc, CFN_HWASAN_CHOOSE_TAG,
+ unsigned_char_type_node);
+
+ /* Add tag to pointer. */
+ tree addr
+ = gimple_build (&stmts, loc, CFN_HWASAN_SET_TAG, ptr_type,
+ untagged_addr, tag);
+
+ /* Tag shadow memory.
+ NOTE: require using `untagged_addr` here for libhwasan API. */
+ gimple_build (&stmts, loc, as_combined_fn (BUILT_IN_HWASAN_TAG_MEM),
+ void_type_node, untagged_addr, tag, new_size);
+
+ /* Insert the built up code sequence into the original instruction stream
+ the iterator points to. */
+ gsi_insert_seq_before (iter, stmts, GSI_SAME_STMT);
+
+ /* Finally, replace old alloca ptr with NEW_ALLOCA. */
+ replace_call_with_value (iter, addr);
+ return;
+ }
+
+ tree last_alloca = get_last_alloca_addr ();
+ const HOST_WIDE_INT redzone_mask = ASAN_RED_ZONE_SIZE - 1;
+
/* If ALIGN > ASAN_RED_ZONE_SIZE, we embed left redzone into first ALIGN
bytes of allocated space. Otherwise, align alloca to ASAN_RED_ZONE_SIZE
manually. */
break;
case BUILT_IN_STRLEN:
+ /* Special case strlen here since its length is taken from its return
+ value.
+
+ The approach taken by the sanitizers is to check a memory access
+ before it's taken. For ASAN strlen is intercepted by libasan, so no
+ check is inserted by the compiler.
+
+ This function still returns `true` and provides a length to the rest
+ of the ASAN pass in order to record what areas have been checked,
+ avoiding superfluous checks later on.
+
+ HWASAN does not intercept any of these internal functions.
+ This means that checks for memory accesses must be inserted by the
+ compiler.
+ strlen is a special case, because we can tell the length from the
+ return of the function, but that is not known until after the function
+ has returned.
+
+ Hence we can't check the memory access before it happens.
+ We could check the memory access after it has already happened, but
+ for now we choose to just ignore `strlen` calls.
+ This decision was simply made because that means the special case is
+ limited to this one case of this one function. */
+ if (hwasan_sanitize_p ())
+ return false;
source0 = gimple_call_arg (call, 0);
len = gimple_call_lhs (call);
break;
flush_redzone_payload ();
}
+
+/* HWAddressSanitizer (hwasan) is a probabilistic method for detecting
+ out-of-bounds and use-after-free bugs.
+ Read more:
+ http://code.google.com/p/address-sanitizer/
+
+ Similar to AddressSanitizer (asan) it consists of two parts: the
+ instrumentation module in this file, and a run-time library.
+
+ The instrumentation module adds a run-time check before every memory insn in
+ the same manner as asan (see the block comment for AddressSanitizer above).
+ Currently, hwasan only adds out-of-line instrumentation, where each check is
+ implemented as a function call to the run-time library. Hence a check for a
+ load of N bytes from address X would be implemented with a function call to
+ __hwasan_loadN(X), and checking a store of N bytes from address X would be
+ implemented with a function call to __hwasan_storeN(X).
+
+ The main difference between hwasan and asan is in the information stored to
+ help this checking. Both sanitizers use a shadow memory area which stores
+ data recording the state of main memory at a corresponding address.
+
+ For hwasan, each 16 byte granule in main memory has a corresponding 1 byte
+ in shadow memory. This shadow address can be calculated with equation:
+ (addr >> log_2(HWASAN_TAG_GRANULE_SIZE))
+ + __hwasan_shadow_memory_dynamic_address;
+ The conversion between real and shadow memory for asan is given in the block
+ comment at the top of this file.
+ The description of how this shadow memory is laid out for asan is in the
+ block comment at the top of this file, here we describe how this shadow
+ memory is used for hwasan.
+
+ For hwasan, each variable is assigned a byte-sized 'tag'. The extent of
+ the shadow memory for that variable is filled with the assigned tag, and
+ every pointer referencing that variable has its top byte set to the same
+ tag. The run-time library redefines malloc so that every allocation returns
+ a tagged pointer and tags the corresponding shadow memory with the same tag.
+
+ On each pointer dereference the tag found in the pointer is compared to the
+ tag found in the shadow memory corresponding to the accessed memory address.
+ If these tags are found to differ then this memory access is judged to be
+ invalid and a report is generated.
+
+ This method of bug detection is not perfect -- it can not catch every bad
+ access -- but catches them probabilistically instead. There is always the
+ possibility that an invalid memory access will happen to access memory
+ tagged with the same tag as the pointer that this access used.
+ The chances of this are approx. 0.4% for any two uncorrelated objects.
+
+ Random tag generation can mitigate this problem by decreasing the
+ probability that an invalid access will be missed in the same manner over
+ multiple runs. i.e. if two objects are tagged the same in one run of the
+ binary they are unlikely to be tagged the same in the next run.
+ Both heap and stack allocated objects have random tags by default.
+
+ [16 byte granule implications]
+ Since the shadow memory only has a resolution on real memory of 16 bytes,
+ invalid accesses that are within the same 16 byte granule as a valid
+ address will not be caught.
+
+ There is a "short-granule" feature in the runtime library which does catch
+ such accesses, but this feature is not implemented for stack objects (since
+ stack objects are allocated and tagged by compiler instrumentation, and
+ this feature has not yet been implemented in GCC instrumentation).
+
+ Another outcome of this 16 byte resolution is that each tagged object must
+ be 16 byte aligned. If two objects were to share any 16 byte granule in
+ memory, then they both would have to be given the same tag, and invalid
+ accesses to one using a pointer to the other would be undetectable.
+
+ [Compiler instrumentation]
+ Compiler instrumentation ensures that two adjacent buffers on the stack are
+ given different tags, this means an access to one buffer using a pointer
+ generated from the other (e.g. through buffer overrun) will have mismatched
+ tags and be caught by hwasan.
+
+ We don't randomly tag every object on the stack, since that would require
+ keeping many registers to record each tag. Instead we randomly generate a
+ tag for each function frame, and each new stack object uses a tag offset
+ from that frame tag.
+ i.e. each object is tagged as RFT + offset, where RFT is the "random frame
+ tag" generated for this frame.
+ This means that randomisation does not perturb the difference between tags
+ on tagged stack objects within a frame, but this is mitigated by the fact
+ that objects with the same tag within a frame are very far apart
+ (approx. 2^HWASAN_TAG_SIZE objects apart).
+
+ As a demonstration, using the same example program as in the asan block
+ comment above:
+
+ int
+ foo ()
+ {
+ char a[23] = {0};
+ int b[2] = {0};
+
+ a[5] = 1;
+ b[1] = 2;
+
+ return a[5] + b[1];
+ }
+
+ On AArch64 the stack will be ordered as follows for the above function:
+
+ Slot 1/ [24 bytes for variable 'a']
+ Slot 2/ [8 bytes padding for alignment]
+ Slot 3/ [8 bytes for variable 'b']
+ Slot 4/ [8 bytes padding for alignment]
+
+ (The padding is there to ensure 16 byte alignment as described in the 16
+ byte granule implications).
+
+ While the shadow memory will be ordered as follows:
+
+ - 2 bytes (representing 32 bytes in real memory) tagged with RFT + 1.
+ - 1 byte (representing 16 bytes in real memory) tagged with RFT + 2.
+
+ And any pointer to "a" will have the tag RFT + 1, and any pointer to "b"
+ will have the tag RFT + 2.
+
+ [Top Byte Ignore requirements]
+ Hwasan requires the ability to store an 8 bit tag in every pointer. There
+ is no instrumentation done to remove this tag from pointers before
+ dereferencing, which means the hardware must ignore this tag during memory
+ accesses.
+
+ Architectures where this feature is available should indicate this using
+ the TARGET_MEMTAG_CAN_TAG_ADDRESSES hook.
+
+ [Stack requires cleanup on unwinding]
+ During normal operation of a hwasan sanitized program more space in the
+ shadow memory becomes tagged as the stack grows. As the stack shrinks this
+ shadow memory space must become untagged. If it is not untagged then when
+ the stack grows again (during other function calls later on in the program)
+ objects on the stack that are usually not tagged (e.g. parameters passed on
+ the stack) can be placed in memory whose shadow space is tagged with
+ something else, and accesses can cause false positive reports.
+
+ Hence we place untagging code on every epilogue of functions which tag some
+ stack objects.
+
+ Moreover, the run-time library intercepts longjmp & setjmp to untag when
+ the stack is unwound this way.
+
+ C++ exceptions are not yet handled, which means this sanitizer can not
+ handle C++ code that throws exceptions -- it will give false positives
+ after an exception has been thrown. The implementation that the hwasan
+ library has for handling these relies on the frame pointer being after any
+ local variables. This is not generally the case for GCC. */
+
+
/* Returns whether we are tagging pointers and checking those tags on memory
access. */
bool
return (hwasan_sanitize_stack_p () && param_hwasan_instrument_allocas);
}
+/* Should we instrument reads? */
+bool
+hwasan_instrument_reads (void)
+{
+ return (hwasan_sanitize_p () && param_hwasan_instrument_reads);
+}
+
+/* Should we instrument writes? */
+bool
+hwasan_instrument_writes (void)
+{
+ return (hwasan_sanitize_p () && param_hwasan_instrument_writes);
+}
+
+/* Should we instrument memory intrinsics (memset/memcpy/memmove)? */
+bool
+hwasan_memintrin (void)
+{
+ return (hwasan_sanitize_p () && param_hwasan_instrument_mem_intrinsics);
+}
+
/* Insert code to protect stack vars. The prologue sequence should be emitted
directly, epilogue sequence returned. BASE is the register holding the
stack base, against which OFFSETS array offsets are relative to, OFFSETS
report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
int *nargs)
{
+ gcc_assert (!hwasan_sanitize_p ());
+
static enum built_in_function report[2][2][6]
= { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
gimple *g;
gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
+ gcc_assert (size_in_bytes == -1 || size_in_bytes >= 1);
gsi = *iter;
if (is_scalar_access)
flags |= ASAN_CHECK_SCALAR_ACCESS;
- g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
+ enum internal_fn fn = hwasan_sanitize_p ()
+ ? IFN_HWASAN_CHECK
+ : IFN_ASAN_CHECK;
+
+ g = gimple_build_call_internal (fn, 4,
build_int_cst (integer_type_node, flags),
base, len,
build_int_cst (integer_type_node,
instrument_derefs (gimple_stmt_iterator *iter, tree t,
location_t location, bool is_store)
{
- if (is_store && !param_asan_instrument_writes)
+ if (is_store && !(asan_instrument_writes () || hwasan_instrument_writes ()))
return;
- if (!is_store && !param_asan_instrument_reads)
+ if (!is_store && !(asan_instrument_reads () || hwasan_instrument_reads ()))
return;
tree type, base;
{
if (DECL_THREAD_LOCAL_P (inner))
return;
- if (!param_asan_globals && is_global_var (inner))
+ /* If we're not sanitizing globals and we can tell statically that this
+ access is inside a global variable, then there's no point adding
+ instrumentation to check the access. N.b. hwasan currently never
+ sanitizes globals. */
+ if ((hwasan_sanitize_p () || !param_asan_globals)
+ && is_global_var (inner))
return;
if (!TREE_STATIC (inner))
{
static bool
instrument_builtin_call (gimple_stmt_iterator *iter)
{
- if (!param_asan_memintrin)
+ if (!(asan_memintrin () || hwasan_memintrin ()))
return false;
bool iter_advanced_p = false;
break;
}
}
- tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
- gimple *g = gimple_build_call (decl, 0);
- gimple_set_location (g, gimple_location (stmt));
- gsi_insert_before (iter, g, GSI_SAME_STMT);
+ /* If a function does not return, then we must handle clearing up the
+ shadow stack accordingly. For ASAN we can simply set the entire stack
+ to "valid" for accesses by setting the shadow space to 0 and all
+ accesses will pass checks. That means that some bad accesses may be
+ missed, but we will not report any false positives.
+
+ This is not possible for HWASAN. Since there is no "always valid" tag
+ we can not set any space to "always valid". If we were to clear the
+ entire shadow stack then code resuming from `longjmp` or a caught
+ exception would trigger false positives when correctly accessing
+ variables on the stack. Hence we need to handle things like
+ `longjmp`, thread exit, and exceptions in a different way. These
+ problems must be handled externally to the compiler, e.g. in the
+ language runtime. */
+ if (! hwasan_sanitize_p ())
+ {
+ tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
+ gimple *g = gimple_build_call (decl, 0);
+ gimple_set_location (g, gimple_location (stmt));
+ gsi_insert_before (iter, g, GSI_SAME_STMT);
+ }
}
bool instrumented = false;
= build_function_type_list (void_type_node, uint64_type_node,
ptr_type_node, NULL_TREE);
+ tree BT_FN_PTR_CONST_PTR_UINT8
+ = build_function_type_list (ptr_type_node, const_ptr_type_node,
+ unsigned_char_type_node, NULL_TREE);
tree BT_FN_VOID_PTR_UINT8_PTRMODE
= build_function_type_list (void_type_node, ptr_type_node,
unsigned_char_type_node,
gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
+ if (hwasan_sanitize_p ())
+ {
+ gcc_assert (param_hwasan_instrument_stack);
+ gimple_seq stmts = NULL;
+ /* Here we swap ASAN_MARK calls for HWASAN_MARK.
+ This is because we are using the approach of using ASAN_MARK as a
+ synonym until here.
+ That approach means we don't yet have to duplicate all the special
+ cases for ASAN_MARK and ASAN_POISON with the exact same handling but
+ called HWASAN_MARK etc.
+
+ N.b. __asan_poison_stack_memory (which implements ASAN_MARK for ASAN)
+ rounds the size up to its shadow memory granularity, while
+ __hwasan_tag_memory (which implements the same for HWASAN) does not.
+ Hence we emit HWASAN_MARK with an aligned size unlike ASAN_MARK. */
+ tree len = gimple_call_arg (g, 2);
+ tree new_len = gimple_build_round_up (&stmts, loc, size_type_node, len,
+ HWASAN_TAG_GRANULE_SIZE);
+ gimple_build (&stmts, loc, CFN_HWASAN_MARK,
+ void_type_node, gimple_call_arg (g, 0),
+ base, new_len);
+ gsi_replace_with_seq (iter, stmts, true);
+ return false;
+ }
+
if (is_poison)
{
if (asan_handled_variables == NULL)
bool
asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
{
+ gcc_assert (!hwasan_sanitize_p ());
gimple *g = gsi_stmt (*iter);
location_t loc = gimple_location (g);
bool recover_p;
int nargs;
bool store_p = gimple_call_internal_p (use, IFN_ASAN_POISON_USE);
- tree fun = report_error_func (store_p, recover_p, tree_to_uhwi (size),
- &nargs);
-
- gcall *call = gimple_build_call (fun, 1,
- build_fold_addr_expr (shadow_var));
+ gcall *call;
+ if (hwasan_sanitize_p ())
+ {
+ tree fun = builtin_decl_implicit (BUILT_IN_HWASAN_TAG_MISMATCH4);
+ /* NOTE: hwasan has no __hwasan_report_* functions like asan does.
+ We use __hwasan_tag_mismatch4 with arguments that tell it the
+ size of access and load to report all tag mismatches.
+
+ The arguments to this function are:
+ Address of invalid access.
+ Bitfield containing information about the access
+ (access_info)
+ Pointer to a frame of registers
+ (for use in printing the contents of registers in a dump)
+ Not used yet -- to be used by inline instrumentation.
+ Size of access.
+
+ The access_info bitfield encodes the following pieces of
+ information:
+ - Is this a store or load?
+ access_info & 0x10 => store
+ - Should the program continue after reporting the error?
+ access_info & 0x20 => recover
+ - What size access is this (not used here since we can always
+ pass the size in the last argument)
+
+ if (access_info & 0xf == 0xf)
+ size is taken from last argument.
+ else
+ size == 1 << (access_info & 0xf)
+
+ The last argument contains the size of the access iff the
+ access_info size indicator is 0xf (we always use this argument
+ rather than storing the size in the access_info bitfield).
+
+ See the function definition `__hwasan_tag_mismatch4` in
+ libsanitizer/hwasan for the full definition.
+ */
+ unsigned access_info = (0x20 * recover_p)
+ + (0x10 * store_p)
+ + (0xf);
+ call = gimple_build_call (fun, 4,
+ build_fold_addr_expr (shadow_var),
+ build_int_cst (pointer_sized_int_node,
+ access_info),
+ build_int_cst (pointer_sized_int_node, 0),
+ size);
+ }
+ else
+ {
+ tree fun = report_error_func (store_p, recover_p, tree_to_uhwi (size),
+ &nargs);
+ call = gimple_build_call (fun, 1,
+ build_fold_addr_expr (shadow_var));
+ }
gimple_set_location (call, gimple_location (use));
gimple *call_to_insert = call;
static unsigned int
asan_instrument (void)
{
+ if (hwasan_sanitize_p ())
+ {
+ transform_statements ();
+ return 0;
+ }
+
if (shadow_ptr_types[0] == NULL_TREE)
asan_init_shadow_ptr_types ();
transform_statements ();
/* opt_pass methods: */
opt_pass * clone () { return new pass_asan (m_ctxt); }
- virtual bool gate (function *) { return gate_asan (); }
+ virtual bool gate (function *) { return gate_asan () || gate_hwasan (); }
virtual unsigned int execute (function *) { return asan_instrument (); }
}; // class pass_asan
{}
/* opt_pass methods: */
- virtual bool gate (function *) { return !optimize && gate_asan (); }
+ virtual bool gate (function *)
+ {
+ return !optimize && (gate_asan () || gate_hwasan ());
+ }
virtual unsigned int execute (function *) { return asan_instrument (); }
}; // class pass_asan_O0
return new pass_asan_O0 (ctxt);
}
+/* HWASAN */
+
/* For stack tagging:
Return the offset from the frame base tag that the "next" expanded object
return tag;
}
+/* Return the combined_fn naming the out-of-line hwasan check routine
+ __hwasan_{load,store}{1,2,4,8,16,_n} for this access.
+ IS_STORE is either 1 (for a store) or 0 (for a load). RECOVER_P selects
+ the _noabort variants. SIZE_IN_BYTES is the access width, or -1 for a
+ variable-length access, which selects the _n variant. *NARGS is set to
+ the number of arguments the chosen routine takes (the _n variant takes
+ an extra size argument). */
+static combined_fn
+hwasan_check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
+ int *nargs)
+{
+ static enum built_in_function check[2][2][6]
+ = { { { BUILT_IN_HWASAN_LOAD1, BUILT_IN_HWASAN_LOAD2,
+ BUILT_IN_HWASAN_LOAD4, BUILT_IN_HWASAN_LOAD8,
+ BUILT_IN_HWASAN_LOAD16, BUILT_IN_HWASAN_LOADN },
+ { BUILT_IN_HWASAN_STORE1, BUILT_IN_HWASAN_STORE2,
+ BUILT_IN_HWASAN_STORE4, BUILT_IN_HWASAN_STORE8,
+ BUILT_IN_HWASAN_STORE16, BUILT_IN_HWASAN_STOREN } },
+ { { BUILT_IN_HWASAN_LOAD1_NOABORT,
+ BUILT_IN_HWASAN_LOAD2_NOABORT,
+ BUILT_IN_HWASAN_LOAD4_NOABORT,
+ BUILT_IN_HWASAN_LOAD8_NOABORT,
+ BUILT_IN_HWASAN_LOAD16_NOABORT,
+ BUILT_IN_HWASAN_LOADN_NOABORT },
+ { BUILT_IN_HWASAN_STORE1_NOABORT,
+ BUILT_IN_HWASAN_STORE2_NOABORT,
+ BUILT_IN_HWASAN_STORE4_NOABORT,
+ BUILT_IN_HWASAN_STORE8_NOABORT,
+ BUILT_IN_HWASAN_STORE16_NOABORT,
+ BUILT_IN_HWASAN_STOREN_NOABORT } } };
+ if (size_in_bytes == -1)
+ {
+ *nargs = 2;
+ return as_combined_fn (check[recover_p][is_store][5]);
+ }
+ *nargs = 1;
+ int size_log2 = exact_log2 (size_in_bytes);
+ /* Index 5 is the variable-length __hwasan_{load,store}N entry, which
+ requires a size argument (*nargs == 2), so only the fixed power-of-two
+ sizes 1..16 (log2 0..4) may be selected here. */
+ gcc_assert (size_log2 >= 0 && size_log2 <= 4);
+ return as_combined_fn (check[recover_p][is_store][size_log2]);
+}
+
+/* Expand the HWASAN_{LOAD,STORE} builtins: replace the IFN_HWASAN_CHECK
+ internal call at *ITER with a call to the matching
+ __hwasan_{load,store}* run-time routine, guarded by a runtime
+ zero-length test when the length is not known to be non-zero.
+ Always returns false; *ITER is left pointing after the removed call.
+ The unnamed bool parameter keeps the signature parallel with
+ asan_expand_check_ifn (presumably its USE_CALLS flag -- unused here). */
+bool
+hwasan_expand_check_ifn (gimple_stmt_iterator *iter, bool)
+{
+ gimple *g = gsi_stmt (*iter);
+ location_t loc = gimple_location (g);
+ bool recover_p;
+ if (flag_sanitize & SANITIZE_USER_HWADDRESS)
+ recover_p = (flag_sanitize_recover & SANITIZE_USER_HWADDRESS) != 0;
+ else
+ recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_HWADDRESS) != 0;
+
+ HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
+ gcc_assert (flags < ASAN_CHECK_LAST);
+ bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
+ bool is_store = (flags & ASAN_CHECK_STORE) != 0;
+ bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
+
+ tree base = gimple_call_arg (g, 1);
+ tree len = gimple_call_arg (g, 2);
+
+ /* `align` is unused for HWASAN_CHECK, but we pass the argument anyway
+ since that way the arguments match ASAN_CHECK. */
+ /* HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3)); */
+
+ /* A scalar access has a compile-time constant length; -1 requests the
+ variable-length __hwasan_{load,store}N routine below. */
+ unsigned HOST_WIDE_INT size_in_bytes
+ = is_scalar_access ? tree_to_shwi (len) : -1;
+
+ gimple_stmt_iterator gsi = *iter;
+
+ if (!is_non_zero_len)
+ {
+ /* So, the length of the memory area to hwasan-protect is
+ non-constant. Let's guard the generated instrumentation code
+ like:
+
+ if (len != 0)
+ {
+ // hwasan instrumentation code goes here.
+ }
+ // fall-through instructions, starting with *ITER. */
+
+ g = gimple_build_cond (NE_EXPR,
+ len,
+ build_int_cst (TREE_TYPE (len), 0),
+ NULL_TREE, NULL_TREE);
+ gimple_set_location (g, loc);
+
+ basic_block then_bb, fallthrough_bb;
+ insert_if_then_before_iter (as_a <gcond *> (g), iter,
+ /*then_more_likely_p=*/true,
+ &then_bb, &fallthrough_bb);
+ /* Note that fallthrough_bb starts with the statement that was
+ pointed to by ITER. */
+
+ /* The 'then block' of the 'if (len != 0)' condition is where
+ we'll generate the hwasan instrumentation code now. */
+ gsi = gsi_last_bb (then_bb);
+ }
+
+ /* The check routines take the address as a pointer-sized integer. */
+ gimple_seq stmts = NULL;
+ tree base_addr = gimple_build (&stmts, loc, NOP_EXPR,
+ pointer_sized_int_node, base);
+
+ int nargs = 0;
+ combined_fn fn
+ = hwasan_check_func (is_store, recover_p, size_in_bytes, &nargs);
+ if (nargs == 1)
+ gimple_build (&stmts, loc, fn, void_type_node, base_addr);
+ else
+ {
+ gcc_assert (nargs == 2);
+ tree sz_arg = gimple_build (&stmts, loc, NOP_EXPR,
+ pointer_sized_int_node, len);
+ gimple_build (&stmts, loc, fn, void_type_node, base_addr, sz_arg);
+ }
+
+ gsi_insert_seq_after (&gsi, stmts, GSI_NEW_STMT);
+ gsi_remove (iter, true);
+ *iter = gsi;
+ return false;
+}
+
+/* For stack tagging:
+
+ Dummy: the HWASAN_MARK internal function should only ever be in the code
+ after the sanopt pass. Reaching this expansion hook therefore indicates
+ a compiler bug. */
+bool
+hwasan_expand_mark_ifn (gimple_stmt_iterator *)
+{
+ gcc_unreachable ();
+}
+
+/* Gate for the hwasan instrumentation passes: run them iff hwasan
+ sanitization is enabled. */
+bool
+gate_hwasan ()
+{
+ return hwasan_sanitize_p ();
+}
+
#include "gt-asan.h"