1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2014 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
26 #include "hash-table.h"
33 #include "hard-reg-set.h"
36 #include "dominance.h"
39 #include "basic-block.h"
40 #include "tree-ssa-alias.h"
41 #include "internal-fn.h"
42 #include "gimple-expr.h"
47 #include "gimple-iterator.h"
50 #include "stor-layout.h"
51 #include "tree-iterator.h"
53 #include "stringpool.h"
54 #include "tree-ssanames.h"
55 #include "tree-pass.h"
57 #include "gimple-pretty-print.h"
63 #include "langhooks.h"
64 #include "alloc-pool.h"
66 #include "gimple-builder.h"
71 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
72 with <2x slowdown on average.
74 The tool consists of two parts:
75 instrumentation module (this file) and a run-time library.
76 The instrumentation module adds a run-time check before every memory insn.
77 For a 8- or 16- byte load accessing address X:
78 ShadowAddr = (X >> 3) + Offset
79 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
81 __asan_report_load8(X);
82 For a load of N bytes (N=1, 2 or 4) from address X:
83 ShadowAddr = (X >> 3) + Offset
84 ShadowValue = *(char*)ShadowAddr;
86 if ((X & 7) + N - 1 > ShadowValue)
87 __asan_report_loadN(X);
88 Stores are instrumented similarly, but using __asan_report_storeN functions.
89 A call to __asan_init_vN() is inserted to the list of module CTORs.
90 N is the version number of the AddressSanitizer API. The changes between the
91 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
93 The run-time library redefines malloc (so that redzones are inserted around
94 the allocated memory) and free (so that reuse of free-ed memory is delayed),
95 provides __asan_report* and __asan_init_vN functions.
98 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
100 The current implementation supports detection of out-of-bounds and
101 use-after-free in the heap, on the stack and for global variables.
103 [Protection of stack variables]
105 To understand how detection of out-of-bounds and use-after-free works
106 for stack variables, lets look at this example on x86_64 where the
107 stack grows downward:
121 For this function, the stack protected by asan will be organized as
122 follows, from the top of the stack to the bottom:
124 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
126 Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
127 the next slot be 32 bytes aligned; this one is called Partial
128 Redzone; this 32 bytes alignment is an asan constraint]
130 Slot 3/ [24 bytes for variable 'a']
132 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
134 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
136 Slot 6/ [8 bytes for variable 'b']
138 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
141 The 32 bytes of LEFT red zone at the bottom of the stack can be
144 1/ The first 8 bytes contain a magical asan number that is always
147 2/ The following 8 bytes contains a pointer to a string (to be
148 parsed at runtime by the runtime asan library), which format is
151 "<function-name> <space> <num-of-variables-on-the-stack>
152 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
153 <length-of-var-in-bytes> ){n} "
155 where '(...){n}' means the content inside the parenthesis occurs 'n'
156 times, with 'n' being the number of variables on the stack.
158 3/ The following 8 bytes contain the PC of the current function which
159 will be used by the run-time library to print an error message.
161 4/ The following 8 bytes are reserved for internal use by the run-time.
163 The shadow memory for that stack layout is going to look like this:
165 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
166 The F1 byte pattern is a magic number called
167 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
168 the memory for that shadow byte is part of the LEFT red zone
169 intended to seat at the bottom of the variables on the stack.
171 - content of shadow memory 8 bytes for slots 6 and 5:
172 0xF4F4F400. The F4 byte pattern is a magic number
173 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
174 memory region for this shadow byte is a PARTIAL red zone
175 intended to pad a variable A, so that the slot following
176 {A,padding} is 32 bytes aligned.
178 Note that the fact that the least significant byte of this
179 shadow memory content is 00 means that 8 bytes of its
180 corresponding memory (which corresponds to the memory of
181 variable 'b') is addressable.
183 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
184 The F2 byte pattern is a magic number called
185 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
186 region for this shadow byte is a MIDDLE red zone intended to
187 seat between two 32 aligned slots of {variable,padding}.
189 - content of shadow memory 8 bytes for slot 3 and 2:
190 0xF4000000. This represents the concatenation of
191 variable 'a' and the partial red zone following it, like what we
192 had for variable 'b'. The least significant 3 bytes being 00
193 means that the 3 bytes of variable 'a' are addressable.
195 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
196 The F3 byte pattern is a magic number called
197 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
198 region for this shadow byte is a RIGHT red zone intended to seat
199 at the top of the variables of the stack.
201 Note that the real variable layout is done in expand_used_vars in
202 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
203 stack variables as well as the different red zones, emits some
204 prologue code to populate the shadow memory as to poison (mark as
205 non-accessible) the regions of the red zones and mark the regions of
206 stack variables as accessible, and emit some epilogue code to
207 un-poison (mark as accessible) the regions of red zones right before
210 [Protection of global variables]
212 The basic idea is to insert a red zone between two global variables
213 and install a constructor function that calls the asan runtime to do
214 the populating of the relevant shadow memory regions at load time.
216 So the global variables are laid out as to insert a red zone between
217 them. The size of the red zones is so that each variable starts on a
220 Then a constructor function is installed so that, for each global
221 variable, it calls the runtime asan library function
222 __asan_register_globals with an instance of this type:
226 // Address of the beginning of the global variable.
229 // Initial size of the global variable.
232 // Size of the global variable + size of the red zone. This
233 // size is 32 bytes aligned.
234 uptr __size_with_redzone;
236 // Name of the global variable.
239 // Name of the module where the global variable is declared.
240 const void *__module_name;
242 // 1 if it has dynamic initialization, 0 otherwise.
243 uptr __has_dynamic_init;
245 // A pointer to struct that contains source location, could be NULL.
246 __asan_global_source_location *__location;
249 A destructor function that calls the runtime asan library function
250 __asan_unregister_globals is also installed. */
/* Shadow memory offset: set explicitly via set_asan_shadow_offset, or
   lazily from the target hook in asan_shadow_offset below.  */
252 static unsigned HOST_WIDE_INT asan_shadow_offset_value
;
/* True once asan_shadow_offset_value holds a valid offset.  */
253 static bool asan_shadow_offset_computed
;
255 /* Sets shadow offset to value in string VAL. */
258 set_asan_shadow_offset (const char *val
)
/* Use the widest unsigned parse available so 64-bit offsets work.  */
263 #ifdef HAVE_LONG_LONG
264 asan_shadow_offset_value
= strtoull (val
, &endp
, 0);
266 asan_shadow_offset_value
= strtoul (val
, &endp
, 0);
/* Reject empty input, trailing junk, or an out-of-range value (errno).  */
268 if (!(*val
!= '\0' && *endp
== '\0' && errno
== 0))
/* Remember that a valid user-supplied offset overrides the target hook.  */
271 asan_shadow_offset_computed
= true;
276 /* Returns Asan shadow offset. */
278 static unsigned HOST_WIDE_INT
279 asan_shadow_offset ()
/* Compute lazily: fall back to the target hook on first use.  */
281 if (!asan_shadow_offset_computed
)
283 asan_shadow_offset_computed
= true;
284 asan_shadow_offset_value
= targetm
.asan_shadow_offset ();
286 return asan_shadow_offset_value
;
/* Alias set used for all shadow memory accesses; created in
   asan_init_shadow_ptr_types.  -1 means "not yet initialized".  */
289 alias_set_type asan_shadow_set
= -1;
291 /* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
292 alias set is used for all shadow memory accesses. */
/* [0] is a pointer to a 1-byte integer, [1] to a 2-byte integer; both are
   built in asan_init_shadow_ptr_types.  */
293 static GTY(()) tree shadow_ptr_types
[2];
295 /* Decl for __asan_option_detect_stack_use_after_return. */
296 static GTY(()) tree asan_detect_stack_use_after_return
;
298 /* Various flags for Asan builtins. */
299 enum asan_check_flags
/* The access is a store (as opposed to a load).  */
301 ASAN_CHECK_STORE
= 1 << 0,
/* The access is a scalar access.  */
302 ASAN_CHECK_SCALAR_ACCESS
= 1 << 1,
/* The access length is known to be non-zero.  */
303 ASAN_CHECK_NON_ZERO_LEN
= 1 << 2,
/* The start of the accessed region has already been instrumented.  */
304 ASAN_CHECK_START_INSTRUMENTED
= 1 << 3,
/* The end of the accessed region has already been instrumented.  */
305 ASAN_CHECK_END_INSTRUMENTED
= 1 << 4,
309 /* Hashtable support for memory references used by gimple
312 /* This type represents a reference to a memory region. */
315 /* The expression of the beginning of the memory region. */
318 /* The size of the access. */
319 HOST_WIDE_INT access_size
;
/* Allocation pool backing asan_mem_ref objects; created lazily by
   asan_mem_ref_get_alloc_pool, released in free_mem_ref_resources.  */
322 static alloc_pool asan_mem_ref_alloc_pool
;
324 /* This creates the alloc pool used to store the instances of
325 asan_mem_ref that are stored in the hash table asan_mem_ref_ht. */
328 asan_mem_ref_get_alloc_pool ()
/* Create the pool on first use only.  */
330 if (asan_mem_ref_alloc_pool
== NULL
)
331 asan_mem_ref_alloc_pool
= create_alloc_pool ("asan_mem_ref",
332 sizeof (asan_mem_ref
),
334 return asan_mem_ref_alloc_pool
;
338 /* Initializes an instance of asan_mem_ref. */
341 asan_mem_ref_init (asan_mem_ref
*ref
, tree start
, HOST_WIDE_INT access_size
)
/* Record the size of the access into REF.  */
344 ref
->access_size
= access_size
;
347 /* Allocates memory for an instance of asan_mem_ref into the memory
348 pool returned by asan_mem_ref_get_alloc_pool and initialize it.
349 START is the address of (or the expression pointing to) the
350 beginning of memory reference. ACCESS_SIZE is the size of the
351 access to the referenced memory. */
354 asan_mem_ref_new (tree start
, HOST_WIDE_INT access_size
)
/* Pool-allocated objects live until free_mem_ref_resources frees the pool.  */
357 (asan_mem_ref
*) pool_alloc (asan_mem_ref_get_alloc_pool ());
359 asan_mem_ref_init (ref
, start
, access_size
);
363 /* This builds and returns a pointer to the end of the memory region
364 that starts at START and of length LEN. */
367 asan_mem_ref_get_end (tree start
, tree len
)
/* An absent or zero LEN means the region is empty.  */
369 if (len
== NULL_TREE
|| integer_zerop (len
))
/* POINTER_PLUS_EXPR needs a pointer-offset-typed second operand.  */
372 if (!ptrofftype_p (len
))
373 len
= convert_to_ptrofftype (len
);
375 return fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (start
), start
, len
);
378 /* Return a tree expression that represents the end of the referenced
379 memory region. Beware that this function can actually build a new
/* Convenience overload: delegate to the START/LEN variant above.  */
383 asan_mem_ref_get_end (const asan_mem_ref
*ref
, tree len
)
385 return asan_mem_ref_get_end (ref
->start
, len
);
/* Hash traits for asan_mem_ref entries stored in asan_mem_ref_ht.
   typed_noop_remove: entries are owned by the alloc pool, so the table
   never frees them itself.  */
388 struct asan_mem_ref_hasher
389 : typed_noop_remove
<asan_mem_ref
>
391 typedef asan_mem_ref value_type
;
392 typedef asan_mem_ref compare_type
;
394 static inline hashval_t
hash (const value_type
*);
395 static inline bool equal (const value_type
*, const compare_type
*);
398 /* Hash a memory reference. */
401 asan_mem_ref_hasher::hash (const asan_mem_ref
*mem_ref
)
/* Combine the start expression and the access size.  */
403 inchash::hash hstate
;
404 inchash::add_expr (mem_ref
->start
, hstate
);
405 hstate
.add_wide_int (mem_ref
->access_size
);
406 return hstate
.end ();
409 /* Compare two memory references. We accept the length of either
410 memory references to be NULL_TREE. */
413 asan_mem_ref_hasher::equal (const asan_mem_ref
*m1
,
414 const asan_mem_ref
*m2
)
/* Equal iff both the access size and the start expression match.  */
416 return (m1
->access_size
== m2
->access_size
417 && operand_equal_p (m1
->start
, m2
->start
, 0));
/* Hash table of already-instrumented memory references; see
   get_mem_ref_hash_table and update_mem_ref_hash_table.  */
420 static hash_table
<asan_mem_ref_hasher
> *asan_mem_ref_ht
;
422 /* Returns a reference to the hash table containing memory references.
423 This function ensures that the hash table is created. Note that
424 this hash table is updated by the function
425 update_mem_ref_hash_table. */
427 static hash_table
<asan_mem_ref_hasher
> *
428 get_mem_ref_hash_table ()
/* Create lazily with a small initial size.  */
430 if (!asan_mem_ref_ht
)
431 asan_mem_ref_ht
= new hash_table
<asan_mem_ref_hasher
> (10);
433 return asan_mem_ref_ht
;
436 /* Clear all entries from the memory references hash table. */
439 empty_mem_ref_hash_table ()
/* Keep the table allocated; only drop its contents.  */
442 asan_mem_ref_ht
->empty ();
445 /* Free the memory references hash table. */
448 free_mem_ref_resources ()
450 delete asan_mem_ref_ht
;
451 asan_mem_ref_ht
= NULL
;
/* The pooled asan_mem_ref objects are released with their pool.  */
453 if (asan_mem_ref_alloc_pool
)
455 free_alloc_pool (asan_mem_ref_alloc_pool
);
456 asan_mem_ref_alloc_pool
= NULL
;
460 /* Return true iff the memory reference REF has been instrumented. */
463 has_mem_ref_been_instrumented (tree ref
, HOST_WIDE_INT access_size
)
/* Probe the hash table with a stack-local lookup key.  */
466 asan_mem_ref_init (&r
, ref
, access_size
);
468 return (get_mem_ref_hash_table ()->find (&r
) != NULL
);
471 /* Return true iff the memory reference REF has been instrumented. */
/* Convenience overload taking an existing asan_mem_ref.  */
474 has_mem_ref_been_instrumented (const asan_mem_ref
*ref
)
476 return has_mem_ref_been_instrumented (ref
->start
, ref
->access_size
);
479 /* Return true iff access to memory region starting at REF and of
480 length LEN has been instrumented. */
483 has_mem_ref_been_instrumented (const asan_mem_ref
*ref
, tree len
)
485 /* First let's see if the address of the beginning of REF has been
487 if (!has_mem_ref_been_instrumented (ref
))
492 /* Let's see if the end of the region has been instrumented. */
493 if (!has_mem_ref_been_instrumented (asan_mem_ref_get_end (ref
, len
),
500 /* Set REF to the memory reference present in a gimple assignment
501 ASSIGNMENT. Return true upon successful completion, false
/* REF_IS_STORE is set to true for a store, false for a load.  */
505 get_mem_ref_of_assignment (const gimple assignment
,
/* Only plain single assignments can carry a memory reference.  */
509 gcc_assert (gimple_assign_single_p (assignment
))
/* A store: the reference is the LHS.  Clobbers are not real stores.  */
511 if (gimple_store_p (assignment
)
512 && !gimple_clobber_p (assignment
))
514 ref
->start
= gimple_assign_lhs (assignment
);
515 *ref_is_store
= true;
/* A load: the reference is the first RHS operand.  */
517 else if (gimple_assign_load_p (assignment
))
519 ref
->start
= gimple_assign_rhs1 (assignment
);
520 *ref_is_store
= false;
/* The access size is the size in bytes of the referenced type.  */
525 ref
->access_size
= int_size_in_bytes (TREE_TYPE (ref
->start
));
529 /* Return the memory references contained in a gimple statement
530 representing a builtin call that has to do with memory access. */
/* NOTE(review): the full parameter list is not visible in this extract;
   the caller (has_stmt_been_instrumented_p) passes, for each of SRC0,
   SRC1 and DST, a triple (asan_mem_ref *, tree *len, bool *is_store),
   plus a DEST_IS_DEREF flag.  Returns true iff a reference was
   recorded -- confirm against the full source.  */
533 get_mem_refs_of_builtin_call (const gimple call
,
/* Only normal builtins are expected here.  */
545 gcc_checking_assert (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
));
547 tree callee
= gimple_call_fndecl (call
);
548 tree source0
= NULL_TREE
, source1
= NULL_TREE
,
549 dest
= NULL_TREE
, len
= NULL_TREE
;
550 bool is_store
= true, got_reference_p
= false;
551 HOST_WIDE_INT access_size
= 1;
/* Dispatch on the builtin: each case records which arguments name
   memory regions and which argument (or the LHS) gives the length.  */
553 switch (DECL_FUNCTION_CODE (callee
))
555 /* (s, s, n) style memops. */
557 case BUILT_IN_MEMCMP
:
558 source0
= gimple_call_arg (call
, 0);
559 source1
= gimple_call_arg (call
, 1);
560 len
= gimple_call_arg (call
, 2);
563 /* (src, dest, n) style memops. */
565 source0
= gimple_call_arg (call
, 0);
566 dest
= gimple_call_arg (call
, 1);
567 len
= gimple_call_arg (call
, 2);
570 /* (dest, src, n) style memops. */
571 case BUILT_IN_MEMCPY
:
572 case BUILT_IN_MEMCPY_CHK
:
573 case BUILT_IN_MEMMOVE
:
574 case BUILT_IN_MEMMOVE_CHK
:
575 case BUILT_IN_MEMPCPY
:
576 case BUILT_IN_MEMPCPY_CHK
:
577 dest
= gimple_call_arg (call
, 0);
578 source0
= gimple_call_arg (call
, 1);
579 len
= gimple_call_arg (call
, 2);
582 /* (dest, n) style memops. */
584 dest
= gimple_call_arg (call
, 0);
585 len
= gimple_call_arg (call
, 1);
588 /* (dest, x, n) style memops. */
589 case BUILT_IN_MEMSET
:
590 case BUILT_IN_MEMSET_CHK
:
591 dest
= gimple_call_arg (call
, 0);
592 len
= gimple_call_arg (call
, 2);
/* strlen: the region read starts at ARG0; its length is the LHS.  */
595 case BUILT_IN_STRLEN
:
596 source0
= gimple_call_arg (call
, 0);
597 len
= gimple_call_lhs (call
);
600 /* And now the __atomic* and __sync builtins.
601 These are handled differently from the classical memory
602 access builtins above. */
604 case BUILT_IN_ATOMIC_LOAD_1
:
605 case BUILT_IN_ATOMIC_LOAD_2
:
606 case BUILT_IN_ATOMIC_LOAD_4
:
607 case BUILT_IN_ATOMIC_LOAD_8
:
608 case BUILT_IN_ATOMIC_LOAD_16
:
612 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
613 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
614 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
615 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
616 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
618 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
619 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
620 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
621 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
622 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
624 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
625 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
626 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
627 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
628 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
630 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
631 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
632 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
633 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
634 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
636 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
637 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
638 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
639 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
640 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
642 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
643 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
644 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
645 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
647 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
648 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
649 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
650 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
651 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
653 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
654 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
655 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
656 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
657 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
659 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
660 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
661 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
662 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
663 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
665 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
666 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
667 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
668 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
669 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
671 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
672 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
673 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
674 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
675 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
677 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
678 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
679 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
680 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
682 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
683 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
684 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
685 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
686 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
688 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
689 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
690 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
691 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
692 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
694 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
695 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
696 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
697 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
698 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
700 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
701 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
702 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
703 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
704 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
706 case BUILT_IN_ATOMIC_EXCHANGE_1
:
707 case BUILT_IN_ATOMIC_EXCHANGE_2
:
708 case BUILT_IN_ATOMIC_EXCHANGE_4
:
709 case BUILT_IN_ATOMIC_EXCHANGE_8
:
710 case BUILT_IN_ATOMIC_EXCHANGE_16
:
712 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
713 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
714 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
715 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
716 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
718 case BUILT_IN_ATOMIC_STORE_1
:
719 case BUILT_IN_ATOMIC_STORE_2
:
720 case BUILT_IN_ATOMIC_STORE_4
:
721 case BUILT_IN_ATOMIC_STORE_8
:
722 case BUILT_IN_ATOMIC_STORE_16
:
724 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
725 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
726 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
727 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
728 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
730 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
731 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
732 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
733 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
734 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
736 case BUILT_IN_ATOMIC_AND_FETCH_1
:
737 case BUILT_IN_ATOMIC_AND_FETCH_2
:
738 case BUILT_IN_ATOMIC_AND_FETCH_4
:
739 case BUILT_IN_ATOMIC_AND_FETCH_8
:
740 case BUILT_IN_ATOMIC_AND_FETCH_16
:
742 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
743 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
744 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
745 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
746 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
748 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
749 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
750 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
751 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
752 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
754 case BUILT_IN_ATOMIC_OR_FETCH_1
:
755 case BUILT_IN_ATOMIC_OR_FETCH_2
:
756 case BUILT_IN_ATOMIC_OR_FETCH_4
:
757 case BUILT_IN_ATOMIC_OR_FETCH_8
:
758 case BUILT_IN_ATOMIC_OR_FETCH_16
:
760 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
761 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
762 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
763 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
764 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
766 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
767 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
768 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
769 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
770 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
772 case BUILT_IN_ATOMIC_FETCH_AND_1
:
773 case BUILT_IN_ATOMIC_FETCH_AND_2
:
774 case BUILT_IN_ATOMIC_FETCH_AND_4
:
775 case BUILT_IN_ATOMIC_FETCH_AND_8
:
776 case BUILT_IN_ATOMIC_FETCH_AND_16
:
778 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
779 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
780 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
781 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
782 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
784 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
785 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
786 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
787 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
788 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
790 case BUILT_IN_ATOMIC_FETCH_OR_1
:
791 case BUILT_IN_ATOMIC_FETCH_OR_2
:
792 case BUILT_IN_ATOMIC_FETCH_OR_4
:
793 case BUILT_IN_ATOMIC_FETCH_OR_8
:
794 case BUILT_IN_ATOMIC_FETCH_OR_16
:
796 dest
= gimple_call_arg (call
, 0);
797 /* DEST represents the address of a memory location.
798 instrument_derefs wants the memory location, so lets
799 dereference the address DEST before handing it to
800 instrument_derefs. */
801 if (TREE_CODE (dest
) == ADDR_EXPR
)
802 dest
= TREE_OPERAND (dest
, 0);
803 else if (TREE_CODE (dest
) == SSA_NAME
|| TREE_CODE (dest
) == INTEGER_CST
)
804 dest
= build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (dest
)),
805 dest
, build_int_cst (TREE_TYPE (dest
), 0));
/* The access size is that of the pointed-to type.  */
809 access_size
= int_size_in_bytes (TREE_TYPE (dest
));
813 /* The other builtins memory access are not instrumented in this
814 function because they either don't have any length parameter,
815 or their length parameter is just a limit. */
/* Record the (region, length, is-store) triples gathered above.  */
819 if (len
!= NULL_TREE
)
821 if (source0
!= NULL_TREE
)
823 src0
->start
= source0
;
824 src0
->access_size
= access_size
;
826 *src0_is_store
= false;
829 if (source1
!= NULL_TREE
)
831 src1
->start
= source1
;
832 src1
->access_size
= access_size
;
834 *src1_is_store
= false;
837 if (dest
!= NULL_TREE
)
840 dst
->access_size
= access_size
;
842 *dst_is_store
= true;
845 got_reference_p
= true;
/* Dereference-style (__atomic/__sync) case: DST is the location itself
   and has no separate length.  */
850 dst
->access_size
= access_size
;
851 *dst_len
= NULL_TREE
;
852 *dst_is_store
= is_store
;
853 *dest_is_deref
= true;
854 got_reference_p
= true;
857 return got_reference_p
;
860 /* Return true iff a given gimple statement has been instrumented.
861 Note that the statement is "defined" by the memory references it
/* A plain assignment is instrumented iff its single memory reference is;
   a memory builtin call iff every region it touches is.  */
865 has_stmt_been_instrumented_p (gimple stmt
)
867 if (gimple_assign_single_p (stmt
))
/* Dummy reference, filled in by get_mem_ref_of_assignment.  */
871 asan_mem_ref_init (&r
, NULL
, 1);
873 if (get_mem_ref_of_assignment (stmt
, &r
, &r_is_store
))
874 return has_mem_ref_been_instrumented (&r
);
876 else if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
/* Up to three regions can be involved (two sources and a destination).  */
878 asan_mem_ref src0
, src1
, dest
;
879 asan_mem_ref_init (&src0
, NULL
, 1);
880 asan_mem_ref_init (&src1
, NULL
, 1);
881 asan_mem_ref_init (&dest
, NULL
, 1);
883 tree src0_len
= NULL_TREE
, src1_len
= NULL_TREE
, dest_len
= NULL_TREE
;
884 bool src0_is_store
= false, src1_is_store
= false,
885 dest_is_store
= false, dest_is_deref
= false;
886 if (get_mem_refs_of_builtin_call (stmt
,
887 &src0
, &src0_len
, &src0_is_store
,
888 &src1
, &src1_len
, &src1_is_store
,
889 &dest
, &dest_len
, &dest_is_store
,
/* Every region that was found must already be instrumented.  */
892 if (src0
.start
!= NULL_TREE
893 && !has_mem_ref_been_instrumented (&src0
, src0_len
))
896 if (src1
.start
!= NULL_TREE
897 && !has_mem_ref_been_instrumented (&src1
, src1_len
))
900 if (dest
.start
!= NULL_TREE
901 && !has_mem_ref_been_instrumented (&dest
, dest_len
))
910 /* Insert a memory reference into the hash table. */
913 update_mem_ref_hash_table (tree ref
, HOST_WIDE_INT access_size
)
915 hash_table
<asan_mem_ref_hasher
> *ht
= get_mem_ref_hash_table ();
/* Build a lookup key on the stack.  */
918 asan_mem_ref_init (&r
, ref
, access_size
);
920 asan_mem_ref
**slot
= ht
->find_slot (&r
, INSERT
);
/* Only allocate a pooled copy when the entry is new.  */
922 *slot
= asan_mem_ref_new (ref
, access_size
);
925 /* Initialize shadow_ptr_types array. */
928 asan_init_shadow_ptr_types (void)
930 asan_shadow_set
= new_alias_set ();
/* [0]: pointer to a distinct signed char type in the shadow alias set.  */
931 shadow_ptr_types
[0] = build_distinct_type_copy (signed_char_type_node
);
932 TYPE_ALIAS_SET (shadow_ptr_types
[0]) = asan_shadow_set
;
933 shadow_ptr_types
[0] = build_pointer_type (shadow_ptr_types
[0]);
/* [1]: pointer to a distinct short int type in the shadow alias set.  */
934 shadow_ptr_types
[1] = build_distinct_type_copy (short_integer_type_node
);
935 TYPE_ALIAS_SET (shadow_ptr_types
[1]) = asan_shadow_set
;
936 shadow_ptr_types
[1] = build_pointer_type (shadow_ptr_types
[1]);
937 initialize_sanitizer_builtins ();
940 /* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */
943 asan_pp_string (pretty_printer
*pp
)
945 const char *buf
= pp_formatted_text (pp
);
946 size_t len
= strlen (buf
);
/* +1 so the STRING_CST keeps its terminating NUL.  */
947 tree ret
= build_string (len
+ 1, buf
);
/* Type the string as an array in the shadow alias set's char type.  */
949 = build_array_type (TREE_TYPE (shadow_ptr_types
[0]),
950 build_index_type (size_int (len
)));
951 TREE_READONLY (ret
) = 1;
952 TREE_STATIC (ret
) = 1;
953 return build1 (ADDR_EXPR
, shadow_ptr_types
[0], ret
);
956 /* Return a CONST_INT representing 4 subsequent shadow memory bytes. */
959 asan_shadow_cst (unsigned char shadow_bytes
[4])
962 unsigned HOST_WIDE_INT val
= 0;
/* The byte-packing below only handles matching word/byte endianness.  */
963 gcc_assert (WORDS_BIG_ENDIAN
== BYTES_BIG_ENDIAN
);
/* Pack the 4 shadow bytes into one SImode constant, respecting target
   byte order.  */
964 for (i
= 0; i
< 4; i
++)
965 val
|= (unsigned HOST_WIDE_INT
) shadow_bytes
[BYTES_BIG_ENDIAN
? 3 - i
: i
]
966 << (BITS_PER_UNIT
* i
);
967 return gen_int_mode (val
, SImode
);
970 /* Clear shadow memory at SHADOW_MEM, LEN bytes. Can't call a library call here
974 asan_clear_shadow (rtx shadow_mem
, HOST_WIDE_INT len
)
976 rtx_insn
*insn
, *insns
, *jump
;
977 rtx_code_label
*top_label
;
/* First try a straight-line clear_storage expansion.  */
981 clear_storage (shadow_mem
, GEN_INT (len
), BLOCK_OP_NORMAL
);
982 insns
= get_insns ();
/* Scan the emitted sequence; if it is short enough it is used as-is.  */
984 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
987 if (insn
== NULL_RTX
)
/* Otherwise emit a compact 4-bytes-per-iteration clearing loop;
   LEN must be a multiple of 4 for the SImode stores below.  */
993 gcc_assert ((len
& 3) == 0);
994 top_label
= gen_label_rtx ();
995 addr
= copy_to_mode_reg (Pmode
, XEXP (shadow_mem
, 0));
996 shadow_mem
= adjust_automodify_address (shadow_mem
, SImode
, addr
, 0);
997 end
= force_reg (Pmode
, plus_constant (Pmode
, addr
, len
));
998 emit_label (top_label
);
1000 emit_move_insn (shadow_mem
, const0_rtx
);
/* addr += 4, then loop while addr < end.  */
1001 tmp
= expand_simple_binop (Pmode
, PLUS
, addr
, gen_int_mode (4, Pmode
), addr
,
1002 true, OPTAB_LIB_WIDEN
);
1004 emit_move_insn (addr
, tmp
);
1005 emit_cmp_and_jump_insns (addr
, end
, LT
, NULL_RTX
, Pmode
, true, top_label
);
1006 jump
= get_last_insn ();
1007 gcc_assert (JUMP_P (jump
));
/* Mark the back edge as taken 80% of the time.  */
1008 add_int_reg_note (jump
, REG_BR_PROB
, REG_BR_PROB_BASE
* 80 / 100);
/* Emit the LASANPC label at function start; the runtime uses it to
   report the current function's PC.  */
1012 asan_function_start (void)
1014 section
*fnsec
= function_section (current_function_decl
);
1015 switch_to_section (fnsec
);
1016 ASM_OUTPUT_DEBUG_LABEL (asm_out_file
, "LASANPC",
1017 current_function_funcdef_no
);
1020 /* Insert code to protect stack vars. The prologue sequence should be emitted
1021 directly, epilogue sequence returned. BASE is the register holding the
1022 stack base, against which OFFSETS array offsets are relative to, OFFSETS
1023 array contains pairs of offsets in reverse order, always the end offset
1024 of some gap that needs protection followed by starting offset,
1025 and DECLS is an array of representative decls for each var partition.
1026 LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
1027 elements long (OFFSETS include gap before the first variable as well
1028 as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
1029 register which stack vars DECL_RTLs are based on. Either BASE should be
1030 assigned to PBASE, when not doing use after return protection, or
1031 corresponding address based on __asan_stack_malloc* return value. */
1034 asan_emit_stack_protection (rtx base
, rtx pbase
, unsigned int alignb
,
1035 HOST_WIDE_INT
*offsets
, tree
*decls
, int length
)
1037 rtx shadow_base
, shadow_mem
, ret
, mem
, orig_base
;
1038 rtx_code_label
*lab
;
1041 unsigned char shadow_bytes
[4];
1042 HOST_WIDE_INT base_offset
= offsets
[length
- 1];
1043 HOST_WIDE_INT base_align_bias
= 0, offset
, prev_offset
;
1044 HOST_WIDE_INT asan_frame_size
= offsets
[0] - base_offset
;
1045 HOST_WIDE_INT last_offset
, last_size
;
1047 unsigned char cur_shadow_byte
= ASAN_STACK_MAGIC_LEFT
;
1048 tree str_cst
, decl
, id
;
1049 int use_after_return_class
= -1;
1051 if (shadow_ptr_types
[0] == NULL_TREE
)
1052 asan_init_shadow_ptr_types ();
1054 /* First of all, prepare the description string. */
1055 pretty_printer asan_pp
;
1057 pp_decimal_int (&asan_pp
, length
/ 2 - 1);
1058 pp_space (&asan_pp
);
1059 for (l
= length
- 2; l
; l
-= 2)
1061 tree decl
= decls
[l
/ 2 - 1];
1062 pp_wide_integer (&asan_pp
, offsets
[l
] - base_offset
);
1063 pp_space (&asan_pp
);
1064 pp_wide_integer (&asan_pp
, offsets
[l
- 1] - offsets
[l
]);
1065 pp_space (&asan_pp
);
1066 if (DECL_P (decl
) && DECL_NAME (decl
))
1068 pp_decimal_int (&asan_pp
, IDENTIFIER_LENGTH (DECL_NAME (decl
)));
1069 pp_space (&asan_pp
);
1070 pp_tree_identifier (&asan_pp
, DECL_NAME (decl
));
1073 pp_string (&asan_pp
, "9 <unknown>");
1074 pp_space (&asan_pp
);
1076 str_cst
= asan_pp_string (&asan_pp
);
1078 /* Emit the prologue sequence. */
1079 if (asan_frame_size
> 32 && asan_frame_size
<= 65536 && pbase
1080 && ASAN_USE_AFTER_RETURN
)
1082 use_after_return_class
= floor_log2 (asan_frame_size
- 1) - 5;
1083 /* __asan_stack_malloc_N guarantees alignment
1084 N < 6 ? (64 << N) : 4096 bytes. */
1085 if (alignb
> (use_after_return_class
< 6
1086 ? (64U << use_after_return_class
) : 4096U))
1087 use_after_return_class
= -1;
1088 else if (alignb
> ASAN_RED_ZONE_SIZE
&& (asan_frame_size
& (alignb
- 1)))
1089 base_align_bias
= ((asan_frame_size
+ alignb
- 1)
1090 & ~(alignb
- HOST_WIDE_INT_1
)) - asan_frame_size
;
1092 /* Align base if target is STRICT_ALIGNMENT. */
1093 if (STRICT_ALIGNMENT
)
1094 base
= expand_binop (Pmode
, and_optab
, base
,
1095 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode
)
1096 << ASAN_SHADOW_SHIFT
)
1097 / BITS_PER_UNIT
), Pmode
), NULL_RTX
,
1100 if (use_after_return_class
== -1 && pbase
)
1101 emit_move_insn (pbase
, base
);
1103 base
= expand_binop (Pmode
, add_optab
, base
,
1104 gen_int_mode (base_offset
- base_align_bias
, Pmode
),
1105 NULL_RTX
, 1, OPTAB_DIRECT
);
1106 orig_base
= NULL_RTX
;
1107 if (use_after_return_class
!= -1)
1109 if (asan_detect_stack_use_after_return
== NULL_TREE
)
1111 id
= get_identifier ("__asan_option_detect_stack_use_after_return");
1112 decl
= build_decl (BUILTINS_LOCATION
, VAR_DECL
, id
,
1114 SET_DECL_ASSEMBLER_NAME (decl
, id
);
1115 TREE_ADDRESSABLE (decl
) = 1;
1116 DECL_ARTIFICIAL (decl
) = 1;
1117 DECL_IGNORED_P (decl
) = 1;
1118 DECL_EXTERNAL (decl
) = 1;
1119 TREE_STATIC (decl
) = 1;
1120 TREE_PUBLIC (decl
) = 1;
1121 TREE_USED (decl
) = 1;
1122 asan_detect_stack_use_after_return
= decl
;
1124 orig_base
= gen_reg_rtx (Pmode
);
1125 emit_move_insn (orig_base
, base
);
1126 ret
= expand_normal (asan_detect_stack_use_after_return
);
1127 lab
= gen_label_rtx ();
1128 int very_likely
= REG_BR_PROB_BASE
- (REG_BR_PROB_BASE
/ 2000 - 1);
1129 emit_cmp_and_jump_insns (ret
, const0_rtx
, EQ
, NULL_RTX
,
1130 VOIDmode
, 0, lab
, very_likely
);
1131 snprintf (buf
, sizeof buf
, "__asan_stack_malloc_%d",
1132 use_after_return_class
);
1133 ret
= init_one_libfunc (buf
);
1134 rtx addr
= convert_memory_address (ptr_mode
, base
);
1135 ret
= emit_library_call_value (ret
, NULL_RTX
, LCT_NORMAL
, ptr_mode
, 2,
1136 GEN_INT (asan_frame_size
1138 TYPE_MODE (pointer_sized_int_node
),
1140 ret
= convert_memory_address (Pmode
, ret
);
1141 emit_move_insn (base
, ret
);
1143 emit_move_insn (pbase
, expand_binop (Pmode
, add_optab
, base
,
1144 gen_int_mode (base_align_bias
1145 - base_offset
, Pmode
),
1146 NULL_RTX
, 1, OPTAB_DIRECT
));
1148 mem
= gen_rtx_MEM (ptr_mode
, base
);
1149 mem
= adjust_address (mem
, VOIDmode
, base_align_bias
);
1150 emit_move_insn (mem
, gen_int_mode (ASAN_STACK_FRAME_MAGIC
, ptr_mode
));
1151 mem
= adjust_address (mem
, VOIDmode
, GET_MODE_SIZE (ptr_mode
));
1152 emit_move_insn (mem
, expand_normal (str_cst
));
1153 mem
= adjust_address (mem
, VOIDmode
, GET_MODE_SIZE (ptr_mode
));
1154 ASM_GENERATE_INTERNAL_LABEL (buf
, "LASANPC", current_function_funcdef_no
);
1155 id
= get_identifier (buf
);
1156 decl
= build_decl (DECL_SOURCE_LOCATION (current_function_decl
),
1157 VAR_DECL
, id
, char_type_node
);
1158 SET_DECL_ASSEMBLER_NAME (decl
, id
);
1159 TREE_ADDRESSABLE (decl
) = 1;
1160 TREE_READONLY (decl
) = 1;
1161 DECL_ARTIFICIAL (decl
) = 1;
1162 DECL_IGNORED_P (decl
) = 1;
1163 TREE_STATIC (decl
) = 1;
1164 TREE_PUBLIC (decl
) = 0;
1165 TREE_USED (decl
) = 1;
1166 DECL_INITIAL (decl
) = decl
;
1167 TREE_ASM_WRITTEN (decl
) = 1;
1168 TREE_ASM_WRITTEN (id
) = 1;
1169 emit_move_insn (mem
, expand_normal (build_fold_addr_expr (decl
)));
1170 shadow_base
= expand_binop (Pmode
, lshr_optab
, base
,
1171 GEN_INT (ASAN_SHADOW_SHIFT
),
1172 NULL_RTX
, 1, OPTAB_DIRECT
);
1174 = plus_constant (Pmode
, shadow_base
,
1175 asan_shadow_offset ()
1176 + (base_align_bias
>> ASAN_SHADOW_SHIFT
));
1177 gcc_assert (asan_shadow_set
!= -1
1178 && (ASAN_RED_ZONE_SIZE
>> ASAN_SHADOW_SHIFT
) == 4);
1179 shadow_mem
= gen_rtx_MEM (SImode
, shadow_base
);
1180 set_mem_alias_set (shadow_mem
, asan_shadow_set
);
1181 if (STRICT_ALIGNMENT
)
1182 set_mem_align (shadow_mem
, (GET_MODE_ALIGNMENT (SImode
)));
1183 prev_offset
= base_offset
;
1184 for (l
= length
; l
; l
-= 2)
1187 cur_shadow_byte
= ASAN_STACK_MAGIC_RIGHT
;
1188 offset
= offsets
[l
- 1];
1189 if ((offset
- base_offset
) & (ASAN_RED_ZONE_SIZE
- 1))
1193 = base_offset
+ ((offset
- base_offset
)
1194 & ~(ASAN_RED_ZONE_SIZE
- HOST_WIDE_INT_1
));
1195 shadow_mem
= adjust_address (shadow_mem
, VOIDmode
,
1196 (aoff
- prev_offset
)
1197 >> ASAN_SHADOW_SHIFT
);
1199 for (i
= 0; i
< 4; i
++, aoff
+= (1 << ASAN_SHADOW_SHIFT
))
1202 if (aoff
< offset
- (1 << ASAN_SHADOW_SHIFT
) + 1)
1203 shadow_bytes
[i
] = 0;
1205 shadow_bytes
[i
] = offset
- aoff
;
1208 shadow_bytes
[i
] = ASAN_STACK_MAGIC_PARTIAL
;
1209 emit_move_insn (shadow_mem
, asan_shadow_cst (shadow_bytes
));
1212 while (offset
<= offsets
[l
- 2] - ASAN_RED_ZONE_SIZE
)
1214 shadow_mem
= adjust_address (shadow_mem
, VOIDmode
,
1215 (offset
- prev_offset
)
1216 >> ASAN_SHADOW_SHIFT
);
1217 prev_offset
= offset
;
1218 memset (shadow_bytes
, cur_shadow_byte
, 4);
1219 emit_move_insn (shadow_mem
, asan_shadow_cst (shadow_bytes
));
1220 offset
+= ASAN_RED_ZONE_SIZE
;
1222 cur_shadow_byte
= ASAN_STACK_MAGIC_MIDDLE
;
1224 do_pending_stack_adjust ();
1226 /* Construct epilogue sequence. */
1230 if (use_after_return_class
!= -1)
1232 rtx_code_label
*lab2
= gen_label_rtx ();
1233 char c
= (char) ASAN_STACK_MAGIC_USE_AFTER_RET
;
1234 int very_likely
= REG_BR_PROB_BASE
- (REG_BR_PROB_BASE
/ 2000 - 1);
1235 emit_cmp_and_jump_insns (orig_base
, base
, EQ
, NULL_RTX
,
1236 VOIDmode
, 0, lab2
, very_likely
);
1237 shadow_mem
= gen_rtx_MEM (BLKmode
, shadow_base
);
1238 set_mem_alias_set (shadow_mem
, asan_shadow_set
);
1239 mem
= gen_rtx_MEM (ptr_mode
, base
);
1240 mem
= adjust_address (mem
, VOIDmode
, base_align_bias
);
1241 emit_move_insn (mem
, gen_int_mode (ASAN_STACK_RETIRED_MAGIC
, ptr_mode
));
1242 unsigned HOST_WIDE_INT sz
= asan_frame_size
>> ASAN_SHADOW_SHIFT
;
1243 if (use_after_return_class
< 5
1244 && can_store_by_pieces (sz
, builtin_memset_read_str
, &c
,
1245 BITS_PER_UNIT
, true))
1246 store_by_pieces (shadow_mem
, sz
, builtin_memset_read_str
, &c
,
1247 BITS_PER_UNIT
, true, 0);
1248 else if (use_after_return_class
>= 5
1249 || !set_storage_via_setmem (shadow_mem
,
1251 gen_int_mode (c
, QImode
),
1252 BITS_PER_UNIT
, BITS_PER_UNIT
,
1255 snprintf (buf
, sizeof buf
, "__asan_stack_free_%d",
1256 use_after_return_class
);
1257 ret
= init_one_libfunc (buf
);
1258 rtx addr
= convert_memory_address (ptr_mode
, base
);
1259 rtx orig_addr
= convert_memory_address (ptr_mode
, orig_base
);
1260 emit_library_call (ret
, LCT_NORMAL
, ptr_mode
, 3, addr
, ptr_mode
,
1261 GEN_INT (asan_frame_size
+ base_align_bias
),
1262 TYPE_MODE (pointer_sized_int_node
),
1263 orig_addr
, ptr_mode
);
1265 lab
= gen_label_rtx ();
1270 shadow_mem
= gen_rtx_MEM (BLKmode
, shadow_base
);
1271 set_mem_alias_set (shadow_mem
, asan_shadow_set
);
1273 if (STRICT_ALIGNMENT
)
1274 set_mem_align (shadow_mem
, (GET_MODE_ALIGNMENT (SImode
)));
1276 prev_offset
= base_offset
;
1277 last_offset
= base_offset
;
1279 for (l
= length
; l
; l
-= 2)
1281 offset
= base_offset
+ ((offsets
[l
- 1] - base_offset
)
1282 & ~(ASAN_RED_ZONE_SIZE
- HOST_WIDE_INT_1
));
1283 if (last_offset
+ last_size
!= offset
)
1285 shadow_mem
= adjust_address (shadow_mem
, VOIDmode
,
1286 (last_offset
- prev_offset
)
1287 >> ASAN_SHADOW_SHIFT
);
1288 prev_offset
= last_offset
;
1289 asan_clear_shadow (shadow_mem
, last_size
>> ASAN_SHADOW_SHIFT
);
1290 last_offset
= offset
;
1293 last_size
+= base_offset
+ ((offsets
[l
- 2] - base_offset
)
1294 & ~(ASAN_RED_ZONE_SIZE
- HOST_WIDE_INT_1
))
1299 shadow_mem
= adjust_address (shadow_mem
, VOIDmode
,
1300 (last_offset
- prev_offset
)
1301 >> ASAN_SHADOW_SHIFT
);
1302 asan_clear_shadow (shadow_mem
, last_size
>> ASAN_SHADOW_SHIFT
);
1305 do_pending_stack_adjust ();
1309 insns
= get_insns ();
1314 /* Return true if DECL, a global var, might be overridden and needs
1315 therefore a local alias. */
/* NOTE(review): lossy extraction — the return type line and braces were
   dropped and statements are split mid-token; tokens below are preserved
   byte-for-byte.  */
/* Returns whether DECL is weak or not bound locally (per the target's
   binds_local_p hook), i.e. the definition the linker picks may not be
   this one — per the comment above, such a global needs a local alias.  */
1318 asan_needs_local_alias (tree decl
)
1320 return DECL_WEAK (decl
) || !targetm
.binds_local_p (decl
);
1323 /* Return true if DECL is a VAR_DECL that should be protected
1324 by Address Sanitizer, by appending a red zone with protected
1325 shadow memory after it and aligning it to at least
1326 ASAN_RED_ZONE_SIZE bytes. */
/* NOTE(review): lossy extraction — local declarations, several `return`
   statements, and braces were dropped (gaps in the embedded numbering,
   e.g. 1330-1335, 1343-1345, 1369-1370); tokens below are byte-for-byte.
   Decides whether DECL gets a red zone + shadow protection (see the block
   comment above this function).  */
1329 asan_protect_global (tree decl
)
/* STRING_CSTs are protected except the ones asan_pp_string itself
   creates — recognized here by element type matching shadow_ptr_types[0]
   (presumably the shadow char type; dropped lines hide the result).  */
1336 if (TREE_CODE (decl
) == STRING_CST
)
1338 /* Instrument all STRING_CSTs except those created
1339 by asan_pp_string here. */
1340 if (shadow_ptr_types
[0] != NULL_TREE
1341 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1342 && TREE_TYPE (TREE_TYPE (decl
)) == TREE_TYPE (shadow_ptr_types
[0]))
/* Large disqualification filter: each clause names a category of VAR_DECL
   that cannot safely get padding/red zones appended.  */
1346 if (TREE_CODE (decl
) != VAR_DECL
1347 /* TLS vars aren't statically protectable. */
1348 || DECL_THREAD_LOCAL_P (decl
)
1349 /* Externs will be protected elsewhere. */
1350 || DECL_EXTERNAL (decl
)
1351 || !DECL_RTL_SET_P (decl
)
1352 /* Comdat vars pose an ABI problem, we can't know if
1353 the var that is selected by the linker will have
1355 || DECL_ONE_ONLY (decl
)
1356 /* Similarly for common vars. People can use -fno-common. */
1357 || (DECL_COMMON (decl
) && TREE_PUBLIC (decl
))
1358 /* Don't protect if using user section, often vars placed
1359 into user section from multiple TUs are then assumed
1360 to be an array of such vars, putting padding in there
1361 breaks this assumption. */
1362 || (DECL_SECTION_NAME (decl
) != NULL
1363 && !symtab_node::get (decl
)->implicit_section
)
1364 || DECL_SIZE (decl
) == 0
1365 || ASAN_RED_ZONE_SIZE
* BITS_PER_UNIT
> MAX_OFILE_ALIGNMENT
1366 || !valid_constant_size_p (DECL_SIZE_UNIT (decl
))
1367 || DECL_ALIGN_UNIT (decl
) > 2 * ASAN_RED_ZONE_SIZE
1368 || TREE_TYPE (decl
) == ubsan_get_source_location_type ())
/* The DECL_RTL must be a MEM whose address is a SYMBOL_REF; constant-pool
   entries and weakrefs are also rejected below.  */
1371 rtl
= DECL_RTL (decl
);
1372 if (!MEM_P (rtl
) || GET_CODE (XEXP (rtl
, 0)) != SYMBOL_REF
)
1374 symbol
= XEXP (rtl
, 0);
1376 if (CONSTANT_POOL_ADDRESS_P (symbol
)
1377 || TREE_CONSTANT_POOL_ADDRESS_P (symbol
))
1380 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl
)))
/* Without ASM_OUTPUT_DEF the needed local alias cannot be emitted, so a
   decl that needs one is rejected (result line dropped by extraction).  */
1383 #ifndef ASM_OUTPUT_DEF
1384 if (asan_needs_local_alias (decl
))
1391 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1392 IS_STORE is either 1 (for a store) or 0 (for a load). */
/* NOTE(review): lossy extraction — the return-type line, braces and the
   lines using NARGS (embedded numbers 1405-1406, 1408-1409 are missing)
   were dropped; tokens below are byte-for-byte.
   Maps (IS_STORE, SIZE_IN_BYTES) to the matching __asan_report_* builtin
   decl via a 2x6 table; column 5 is the _N variant used when the size is
   not a compile-time power of two (callers pass -1).  NARGS is presumably
   set to 1 or 2 in the dropped lines — TODO confirm.  */
1395 report_error_func (bool is_store
, HOST_WIDE_INT size_in_bytes
, int *nargs
)
1397 static enum built_in_function report
[2][6]
1398 = { { BUILT_IN_ASAN_REPORT_LOAD1
, BUILT_IN_ASAN_REPORT_LOAD2
,
1399 BUILT_IN_ASAN_REPORT_LOAD4
, BUILT_IN_ASAN_REPORT_LOAD8
,
1400 BUILT_IN_ASAN_REPORT_LOAD16
, BUILT_IN_ASAN_REPORT_LOAD_N
},
1401 { BUILT_IN_ASAN_REPORT_STORE1
, BUILT_IN_ASAN_REPORT_STORE2
,
1402 BUILT_IN_ASAN_REPORT_STORE4
, BUILT_IN_ASAN_REPORT_STORE8
,
1403 BUILT_IN_ASAN_REPORT_STORE16
, BUILT_IN_ASAN_REPORT_STORE_N
} };
/* -1 means "variable size": use the _N entry.  */
1404 if (size_in_bytes
== -1)
1407 return builtin_decl_implicit (report
[is_store
][5]);
/* Fixed power-of-two size: index the table by log2 of the size.  */
1410 return builtin_decl_implicit (report
[is_store
][exact_log2 (size_in_bytes
)]);
1413 /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1414 IS_STORE is either 1 (for a store) or 0 (for a load). */
/* NOTE(review): lossy extraction — return type, braces and NARGS-setting
   lines dropped (embedded numbers 1427-1428, 1430-1431 missing); tokens
   below are byte-for-byte.
   Twin of report_error_func above, but selects the checking entry points
   __asan_{load,store}{1,2,4,8,16,N} instead of the reporting ones.  */
1417 check_func (bool is_store
, int size_in_bytes
, int *nargs
)
1419 static enum built_in_function check
[2][6]
1420 = { { BUILT_IN_ASAN_LOAD1
, BUILT_IN_ASAN_LOAD2
,
1421 BUILT_IN_ASAN_LOAD4
, BUILT_IN_ASAN_LOAD8
,
1422 BUILT_IN_ASAN_LOAD16
, BUILT_IN_ASAN_LOADN
},
1423 { BUILT_IN_ASAN_STORE1
, BUILT_IN_ASAN_STORE2
,
1424 BUILT_IN_ASAN_STORE4
, BUILT_IN_ASAN_STORE8
,
1425 BUILT_IN_ASAN_STORE16
, BUILT_IN_ASAN_STOREN
} };
/* -1 means "variable size": use the _N entry (column 5).  */
1426 if (size_in_bytes
== -1)
1429 return builtin_decl_implicit (check
[is_store
][5]);
1432 return builtin_decl_implicit (check
[is_store
][exact_log2 (size_in_bytes
)]);
1435 /* Split the current basic block and create a condition statement
1436 insertion point right before or after the statement pointed to by
1437 ITER. Return an iterator to the point at which the caller might
1438 safely insert the condition statement.
1440 THEN_BLOCK must be set to the address of an uninitialized instance
1441 of basic_block. The function will then set *THEN_BLOCK to the
1442 'then block' of the condition statement to be inserted by the
1445 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1446 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1448 Similarly, the function will set *FALLTRHOUGH_BLOCK to the 'else
1449 block' of the condition statement to be inserted by the caller.
1451 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1452 statements starting from *ITER, and *THEN_BLOCK is a new empty
1455 *ITER is adjusted to point to always point to the first statement
1456 of the basic block * FALLTHROUGH_BLOCK. That statement is the
1457 same as what ITER was pointing to prior to calling this function,
1458 if BEFORE_P is true; otherwise, it is its following statement. */
/* NOTE(review): lossy extraction — some parameter lines (the `bool
   before_p` parameter is referenced below but its declaration line was
   dropped), braces and a gsi_prev/gsi_next adjustment appear to be
   missing; tokens below are byte-for-byte.  The contract is spelled out
   in the block comment above this function.  */
1460 gimple_stmt_iterator
1461 create_cond_insert_point (gimple_stmt_iterator
*iter
,
1463 bool then_more_likely_p
,
1464 bool create_then_fallthru_edge
,
1465 basic_block
*then_block
,
1466 basic_block
*fallthrough_block
)
1468 gimple_stmt_iterator gsi
= *iter
;
/* When inserting before a real statement, the split point presumably
   moves back one statement (dropped lines between 1470 and 1473).  */
1470 if (!gsi_end_p (gsi
) && before_p
)
1473 basic_block cur_bb
= gsi_bb (*iter
);
/* Split CUR_BB after GSI; E goes from the condition block to the
   fallthrough block.  */
1475 edge e
= split_block (cur_bb
, gsi_stmt (gsi
));
1477 /* Get a hold on the 'condition block', the 'then block' and the
1479 basic_block cond_bb
= e
->src
;
1480 basic_block fallthru_bb
= e
->dest
;
1481 basic_block then_bb
= create_empty_bb (cond_bb
);
/* Keep the loop structure consistent; flag it for fixup.  */
1484 add_bb_to_loop (then_bb
, cond_bb
->loop_father
);
1485 loops_state_set (LOOPS_NEED_FIXUP
);
1488 /* Set up the newly created 'then block'. */
1489 e
= make_edge (cond_bb
, then_bb
, EDGE_TRUE_VALUE
);
/* Probabilities: the fallthrough edge gets PROB_VERY_UNLIKELY when the
   'then' side is the likely one, else its complement.  */
1490 int fallthrough_probability
1491 = then_more_likely_p
1492 ? PROB_VERY_UNLIKELY
1493 : PROB_ALWAYS
- PROB_VERY_UNLIKELY
;
1494 e
->probability
= PROB_ALWAYS
- fallthrough_probability
;
1495 if (create_then_fallthru_edge
)
1496 make_single_succ_edge (then_bb
, fallthru_bb
, EDGE_FALLTHRU
);
1498 /* Set up the fallthrough basic block. */
1499 e
= find_edge (cond_bb
, fallthru_bb
);
1500 e
->flags
= EDGE_FALSE_VALUE
;
1501 e
->count
= cond_bb
->count
;
1502 e
->probability
= fallthrough_probability
;
1504 /* Update dominance info for the newly created then_bb; note that
1505 fallthru_bb's dominance info has already been updated by
1507 if (dom_info_available_p (CDI_DOMINATORS
))
1508 set_immediate_dominator (CDI_DOMINATORS
, then_bb
, cond_bb
);
/* Output parameters and the caller's iterator, then the insertion point
   at the end of the condition block.  */
1510 *then_block
= then_bb
;
1511 *fallthrough_block
= fallthru_bb
;
1512 *iter
= gsi_start_bb (fallthru_bb
);
1514 return gsi_last_bb (cond_bb
);
1517 /* Insert an if condition followed by a 'then block' right before the
1518 statement pointed to by ITER. The fallthrough block -- which is the
1519 else block of the condition as well as the destination of the
1520 outcoming edge of the 'then block' -- starts with the statement
1523 COND is the condition of the if.
1525 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1526 'then block' is higher than the probability of the edge to the
1529 Upon completion of the function, *THEN_BB is set to the newly
1530 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1533 *ITER is adjusted to still point to the same statement it was
1534 pointing to initially. */
/* NOTE(review): lossy extraction — the return type, braces and several
   argument lines of the create_cond_insert_point call (embedded numbers
   1545-1546, 1548-1549 missing) were dropped; tokens below are
   byte-for-byte.  Thin wrapper: carve out a condition insertion point
   before *ITER, then insert COND there.  Contract is in the block
   comment above.  */
1537 insert_if_then_before_iter (gimple cond
,
1538 gimple_stmt_iterator
*iter
,
1539 bool then_more_likely_p
,
1540 basic_block
*then_bb
,
1541 basic_block
*fallthrough_bb
)
1543 gimple_stmt_iterator cond_insert_point
=
1544 create_cond_insert_point (iter
,
1547 /*create_then_fallthru_edge=*/true,
1550 gsi_insert_after (&cond_insert_point
, cond
, GSI_NEW_STMT
);
1554 (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset (). */
/* NOTE(review): lossy extraction — return type, braces, the `gimple g`
   declaration and a few trailing argument lines (embedded 1567, 1588
   missing) were dropped; tokens below are byte-for-byte.
   Emits, after *GSI, the statement sequence computing the shadow byte for
   BASE_ADDR per the header comment fragment above:
   (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset (), then a cast
   to SHADOW_PTR_TYPE and a MEM_REF load; returns the loaded SSA name.  */
1557 build_shadow_mem_access (gimple_stmt_iterator
*gsi
, location_t location
,
1558 tree base_addr
, tree shadow_ptr_type
)
1560 tree t
, uintptr_type
= TREE_TYPE (base_addr
);
1561 tree shadow_type
= TREE_TYPE (shadow_ptr_type
);
/* Step 1: base_addr >> ASAN_SHADOW_SHIFT.  */
1564 t
= build_int_cst (uintptr_type
, ASAN_SHADOW_SHIFT
);
1565 g
= gimple_build_assign_with_ops (RSHIFT_EXPR
,
1566 make_ssa_name (uintptr_type
, NULL
),
1568 gimple_set_location (g
, location
);
1569 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
/* Step 2: add the target's shadow offset.  */
1571 t
= build_int_cst (uintptr_type
, asan_shadow_offset ());
1572 g
= gimple_build_assign_with_ops (PLUS_EXPR
,
1573 make_ssa_name (uintptr_type
, NULL
),
1574 gimple_assign_lhs (g
), t
);
1575 gimple_set_location (g
, location
);
1576 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
/* Step 3: convert the integer address to the shadow pointer type.  */
1578 g
= gimple_build_assign_with_ops (NOP_EXPR
,
1579 make_ssa_name (shadow_ptr_type
, NULL
),
1580 gimple_assign_lhs (g
), NULL_TREE
);
1581 gimple_set_location (g
, location
);
1582 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
/* Step 4: dereference it (MEM_REF with zero offset) to read the shadow
   byte; the caller compares this value.  */
1584 t
= build2 (MEM_REF
, shadow_type
, gimple_assign_lhs (g
),
1585 build_int_cst (shadow_ptr_type
, 0));
1586 g
= gimple_build_assign_with_ops (MEM_REF
,
1587 make_ssa_name (shadow_type
, NULL
),
1589 gimple_set_location (g
, location
);
1590 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
1591 return gimple_assign_lhs (g
);
1594 /* BASE can already be an SSA_NAME; in that case, do not create a
1595 new SSA_NAME for it. */
/* NOTE(review): lossy extraction — return type, braces, a trailing
   parameter (presumably `bool before_p`, given the insert-before/after
   pair below — TODO confirm), the early `return base;` and the assign's
   source operand lines were dropped; tokens below are byte-for-byte.
   Per the comment above: wrap BASE in a fresh SSA_NAME via an assignment
   inserted at *ITER unless it already is one; returns the SSA name.  */
1598 maybe_create_ssa_name (location_t loc
, tree base
, gimple_stmt_iterator
*iter
,
1601 if (TREE_CODE (base
) == SSA_NAME
)
1604 = gimple_build_assign_with_ops (TREE_CODE (base
),
1605 make_ssa_name (TREE_TYPE (base
), NULL
),
1607 gimple_set_location (g
, loc
);
/* One of these two runs depending on the dropped before_p condition.  */
1609 gsi_insert_before (iter
, g
, GSI_SAME_STMT
);
1611 gsi_insert_after (iter
, g
, GSI_NEW_STMT
);
1612 return gimple_assign_lhs (g
);
1615 /* LEN can already have necessary size and precision;
1616 in that case, do not create a new variable. */
/* NOTE(review): lossy extraction — same dropped pieces as
   maybe_create_ssa_name above (return type, braces, final parameter,
   early return, NOP source operand); tokens below are byte-for-byte.
   Per the comment above: cast LEN to pointer_sized_int_node via an
   inserted NOP assignment unless it already has pointer-offset type.  */
1619 maybe_cast_to_ptrmode (location_t loc
, tree len
, gimple_stmt_iterator
*iter
,
1622 if (ptrofftype_p (len
))
1625 = gimple_build_assign_with_ops (NOP_EXPR
,
1626 make_ssa_name (pointer_sized_int_node
, NULL
),
1628 gimple_set_location (g
, loc
);
/* Insert before or after *ITER per the dropped before_p condition.  */
1630 gsi_insert_before (iter
, g
, GSI_SAME_STMT
);
1632 gsi_insert_after (iter
, g
, GSI_NEW_STMT
);
1633 return gimple_assign_lhs (g
);
1636 /* Instrument the memory access instruction BASE. Insert new
1637 statements before or after ITER.
1639 Note that the memory access represented by BASE can be either an
1640 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1641 location. IS_STORE is TRUE for a store, FALSE for a load.
1642 BEFORE_P is TRUE for inserting the instrumentation code before
1643 ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
1644 for a scalar memory access and FALSE for memory region access.
1645 NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
1646 length. ALIGN tells alignment of accessed memory object.
1648 START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
1649 memory region have already been instrumented.
1651 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1652 statement it was pointing to prior to calling this function,
1653 otherwise, it points to the statement logically following it. */
<br>
/* NOTE(review): lossy extraction — return type, braces, several guard
   conditions (e.g. the early-return body after the start/end-instrumented
   check, the `if (len)` / `else` structure around 1677-1688, the
   `if (is_store)` before flag setting, and two IFN_ASAN_CHECK arguments)
   were dropped; tokens below are byte-for-byte.  Contract is in the large
   block comment above: emit an IFN_ASAN_CHECK internal call for the
   access [BASE, BASE+LEN) at *ITER.  */
1656 build_check_stmt (location_t loc
, tree base
, tree len
,
1657 HOST_WIDE_INT size_in_bytes
, gimple_stmt_iterator
*iter
,
1658 bool is_non_zero_len
, bool before_p
, bool is_store
,
1659 bool is_scalar_access
, unsigned int align
= 0,
1660 bool start_instrumented
= false,
1661 bool end_instrumented
= false)
1663 gimple_stmt_iterator gsi
= *iter
;
/* A positive constant size implies the region is non-empty.  */
1666 gcc_assert (!(size_in_bytes
> 0 && !is_non_zero_len
));
/* Both ends already instrumented: nothing to do (dropped return).  */
1668 if (start_instrumented
&& end_instrumented
)
/* Normalize BASE to an SSA name; unshare first since we re-emit it.  */
1677 base
= unshare_expr (base
);
1678 base
= maybe_create_ssa_name (loc
, base
, &gsi
, before_p
);
/* Variable length: normalize to pointer-sized int.  Constant length:
   materialize SIZE_IN_BYTES as the length operand.  */
1682 len
= unshare_expr (len
);
1683 len
= maybe_cast_to_ptrmode (loc
, len
, iter
, before_p
);
1687 gcc_assert (size_in_bytes
!= -1);
1688 len
= build_int_cst (pointer_sized_int_node
, size_in_bytes
);
/* Downgrade to a region (non-scalar) check when the size is not a
   power of two <= 16, or alignment is insufficient for one shadow read.  */
1691 if (size_in_bytes
> 1)
1693 if ((size_in_bytes
& (size_in_bytes
- 1)) != 0
1694 || size_in_bytes
> 16)
1695 is_scalar_access
= false;
1696 else if (align
&& align
< size_in_bytes
* BITS_PER_UNIT
)
1698 /* On non-strict alignment targets, if
1699 16-byte access is just 8-byte aligned,
1700 this will result in misaligned shadow
1701 memory 2 byte load, but otherwise can
1702 be handled using one read. */
1703 if (size_in_bytes
!= 16
1705 || align
< 8 * BITS_PER_UNIT
)
1706 is_scalar_access
= false;
/* Encode the check's properties into the FLAGS operand of the
   internal function call.  */
1710 HOST_WIDE_INT flags
= 0;
1712 flags
|= ASAN_CHECK_STORE
;
1713 if (is_non_zero_len
)
1714 flags
|= ASAN_CHECK_NON_ZERO_LEN
;
1715 if (is_scalar_access
)
1716 flags
|= ASAN_CHECK_SCALAR_ACCESS
;
1717 if (start_instrumented
)
1718 flags
|= ASAN_CHECK_START_INSTRUMENTED
;
1719 if (end_instrumented
)
1720 flags
|= ASAN_CHECK_END_INSTRUMENTED
;
/* IFN_ASAN_CHECK (flags, base, len, align/BITS_PER_UNIT) — the base/len
   argument lines were dropped by the extraction.  */
1722 g
= gimple_build_call_internal (IFN_ASAN_CHECK
, 4,
1723 build_int_cst (integer_type_node
, flags
),
1725 build_int_cst (integer_type_node
,
1726 align
/ BITS_PER_UNIT
));
1727 gimple_set_location (g
, loc
);
/* Placement honors BEFORE_P (condition line dropped).  */
1729 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
1732 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
1738 /* If T represents a memory access, add instrumentation code before ITER.
1739 LOCATION is source code location.
1740 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
/* NOTE(review): lossy extraction — return type, braces, early `return`s,
   the switch's case labels/default (only the switch head at 1755
   survives) and several condition lines were dropped; tokens below are
   byte-for-byte.  Contract per the comment above: instrument the memory
   access T before *ITER if it is one we check.  */
1743 instrument_derefs (gimple_stmt_iterator
*iter
, tree t
,
1744 location_t location
, bool is_store
)
/* Respect the --param toggles for write/read instrumentation.  */
1746 if (is_store
&& !ASAN_INSTRUMENT_WRITES
)
1748 if (!is_store
&& !ASAN_INSTRUMENT_READS
)
1752 HOST_WIDE_INT size_in_bytes
;
1754 type
= TREE_TYPE (t
);
/* Dropped case labels presumably filter to ARRAY_REF/COMPONENT_REF/
   INDIRECT_REF/MEM_REF-style accesses — TODO confirm.  */
1755 switch (TREE_CODE (t
))
1769 size_in_bytes
= int_size_in_bytes (type
);
1770 if (size_in_bytes
<= 0)
1773 HOST_WIDE_INT bitsize
, bitpos
;
1775 enum machine_mode mode
;
1776 int volatilep
= 0, unsignedp
= 0;
1777 tree inner
= get_inner_reference (t
, &bitsize
, &bitpos
, &offset
,
1778 &mode
, &unsignedp
, &volatilep
, false);
/* Bit-field member: recurse on the representative field covering the
   whole storage unit instead of the partial bits.  */
1780 if (TREE_CODE (t
) == COMPONENT_REF
1781 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t
, 1)) != NULL_TREE
)
1783 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t
, 1));
1784 instrument_derefs (iter
, build3 (COMPONENT_REF
, TREE_TYPE (repr
),
1785 TREE_OPERAND (t
, 0), repr
,
1786 NULL_TREE
), location
, is_store
);
/* Accesses not on byte boundaries / not exactly the type size are
   skipped (dropped return).  */
1790 if (bitpos
% BITS_PER_UNIT
1791 || bitsize
!= size_in_bytes
* BITS_PER_UNIT
)
/* Whole in-bounds access to a known VAR_DECL: several categories are
   provably valid and need no check.  */
1794 if (TREE_CODE (inner
) == VAR_DECL
1795 && offset
== NULL_TREE
1797 && DECL_SIZE (inner
)
1798 && tree_fits_shwi_p (DECL_SIZE (inner
))
1799 && bitpos
+ bitsize
<= tree_to_shwi (DECL_SIZE (inner
)))
1801 if (DECL_THREAD_LOCAL_P (inner
))
1803 if (!TREE_STATIC (inner
))
1805 /* Automatic vars in the current function will be always
1807 if (decl_function_context (inner
) == current_function_decl
)
1810 /* Always instrument external vars, they might be dynamically
1812 else if (!DECL_EXTERNAL (inner
))
1814 /* For static vars if they are known not to be dynamically
1815 initialized, they will be always accessible. */
1816 varpool_node
*vnode
= varpool_node::get (inner
);
1817 if (vnode
&& !vnode
->dynamically_initialized
)
/* Emit the check on &T, once per extended-basic-block (hash table
   suppresses duplicates), and record both &T and T as instrumented.  */
1822 base
= build_fold_addr_expr (t
);
1823 if (!has_mem_ref_been_instrumented (base
, size_in_bytes
))
1825 unsigned int align
= get_object_alignment (t
);
1826 build_check_stmt (location
, base
, NULL_TREE
, size_in_bytes
, iter
,
1827 /*is_non_zero_len*/size_in_bytes
> 0, /*before_p=*/true,
1828 is_store
, /*is_scalar_access*/true, align
);
1829 update_mem_ref_hash_table (base
, size_in_bytes
);
1830 update_mem_ref_hash_table (t
, size_in_bytes
);
1835 /* Instrument an access to a contiguous memory region that starts at
1836 the address pointed to by BASE, over a length of LEN (expressed in
1837 the sizeof (*BASE) bytes). ITER points to the instruction before
1838 which the instrumentation instructions must be inserted. LOCATION
1839 is the source location that the instrumentation instructions must
1840 have. If IS_STORE is true, then the memory access is a store;
1841 otherwise, it's a load. */
/* NOTE(review): lossy extraction — return type, braces and the early
   `return` after the guard were dropped; tokens below are byte-for-byte.
   Contract per the comment above: instrument the region [BASE, BASE+LEN)
   before *ITER.  */
1844 instrument_mem_region_access (tree base
, tree len
,
1845 gimple_stmt_iterator
*iter
,
1846 location_t location
, bool is_store
)
/* Sanity guard: need a pointer base, an integral length, and a
   non-trivially-zero length.  */
1848 if (!POINTER_TYPE_P (TREE_TYPE (base
))
1849 || !INTEGRAL_TYPE_P (TREE_TYPE (len
))
1850 || integer_zerop (len
))
1853 /* If the beginning of the memory region has already been
1854 instrumented, do not instrument it. */
1855 bool start_instrumented
= has_mem_ref_been_instrumented (base
, 1);
1857 /* If the end of the memory region has already been instrumented, do
1858 not instrument it. */
1859 tree end
= asan_mem_ref_get_end (base
, len
);
1860 bool end_instrumented
= has_mem_ref_been_instrumented (end
, 1);
/* -1 marks a length that is not a compile-time constant.  */
1862 HOST_WIDE_INT size_in_bytes
= tree_fits_shwi_p (len
) ? tree_to_shwi (len
) : -1;
1864 build_check_stmt (location
, base
, len
, size_in_bytes
, iter
,
1865 /*is_non_zero_len*/size_in_bytes
> 0, /*before_p*/true,
1866 is_store
, /*is_scalar_access*/false, /*align*/0,
1867 start_instrumented
, end_instrumented
);
/* Record the endpoints; END is only known when LEN was constant.  */
1869 update_mem_ref_hash_table (base
, 1);
1870 if (size_in_bytes
!= -1)
1871 update_mem_ref_hash_table (end
, 1);
/* Re-fetch the iterator: the statement sequence around it changed.  */
1873 *iter
= gsi_for_stmt (gsi_stmt (*iter
));
1876 /* Instrument the call (to the builtin strlen function) pointed to by
1879 This function instruments the access to the first byte of the
1880 argument, right before the call. After the call it instruments the
1881 access to the last byte of the argument; it uses the result of the
1882 call to deduce the offset of that last byte.
1884 Upon completion, iff the call has actually been instrumented, this
1885 function returns TRUE and *ITER points to the statement logically
1886 following the built-in strlen function call *ITER was initially
1887 pointing to. Otherwise, the function returns FALSE and *ITER
1888 remains unchanged. */
/* NOTE(review): lossy extraction — return type, braces, the `gimple g`
   declaration, the NULL-lhs bail-out (comment at 1906-1910 survives but
   its `if`/`return false;` do not), source-operand lines of the two
   assigns, and the final `return true;` were dropped; tokens below are
   byte-for-byte.  Contract is in the block comment above: check byte 0
   of the argument before the strlen call and byte [result] after it.  */
1891 instrument_strlen_call (gimple_stmt_iterator
*iter
)
1894 gimple call
= gsi_stmt (*iter
);
1895 gcc_assert (is_gimple_call (call
));
/* Must really be the strlen builtin.  */
1897 tree callee
= gimple_call_fndecl (call
);
1898 gcc_assert (is_builtin_fn (callee
)
1899 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
1900 && DECL_FUNCTION_CODE (callee
) == BUILT_IN_STRLEN
);
1902 location_t loc
= gimple_location (call
);
1904 tree len
= gimple_call_lhs (call
);
1906 /* Some passes might clear the return value of the strlen call;
1907 bail out in that case. Return FALSE as we are not advancing
1910 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (len
)));
/* Normalize the strlen result for use as a pointer offset (inserted
   after the call, hence before_p = false).  */
1912 len
= maybe_cast_to_ptrmode (loc
, len
, iter
, /*before_p*/false);
1914 tree str_arg
= gimple_call_arg (call
, 0);
1915 bool start_instrumented
= has_mem_ref_been_instrumented (str_arg
, 1);
/* Cast the argument to char* (source operand line dropped).  */
1917 tree cptr_type
= build_pointer_type (char_type_node
);
1918 g
= gimple_build_assign_with_ops (NOP_EXPR
,
1919 make_ssa_name (cptr_type
, NULL
),
1921 gimple_set_location (g
, loc
);
1922 gsi_insert_before (iter
, g
, GSI_SAME_STMT
);
1923 str_arg
= gimple_assign_lhs (g
);
/* Check the first byte before the call.  */
1925 build_check_stmt (loc
, str_arg
, NULL_TREE
, 1, iter
,
1926 /*is_non_zero_len*/true, /*before_p=*/true,
1927 /*is_store=*/false, /*is_scalar_access*/true, /*align*/0,
1928 start_instrumented
, start_instrumented
);
/* str_arg + len gives the terminating NUL's address (operand lines
   1932-1933 dropped); check that byte after the call.  */
1930 g
= gimple_build_assign_with_ops (POINTER_PLUS_EXPR
,
1931 make_ssa_name (cptr_type
, NULL
),
1934 gimple_set_location (g
, loc
);
1935 gsi_insert_after (iter
, g
, GSI_NEW_STMT
);
1937 build_check_stmt (loc
, gimple_assign_lhs (g
), NULL_TREE
, 1, iter
,
1938 /*is_non_zero_len*/true, /*before_p=*/false,
1939 /*is_store=*/false, /*is_scalar_access*/true, /*align*/0);
1944 /* Instrument the call to a built-in memory access function that is
1945 pointed to by the iterator ITER.
1947 Upon completion, return TRUE iff *ITER has been advanced to the
1948 statement following the one it was originally pointing to. */
/* NOTE(review): lossy extraction — return type, braces, the last
   arguments of get_mem_refs_of_builtin_call (presumably &dest_is_deref —
   the variable is otherwise unused here; TODO confirm), the
   `if (dest_is_deref)` / `*iter` advance around 1985-1987 and an `else`
   ladder were dropped; tokens below are byte-for-byte.  Contract per the
   comment above: instrument a memory-accessing builtin call at *ITER and
   report whether the iterator advanced.  */
1951 instrument_builtin_call (gimple_stmt_iterator
*iter
)
/* Honor the --param toggle for memory intrinsics.  */
1953 if (!ASAN_MEMINTRIN
)
1956 bool iter_advanced_p
= false;
1957 gimple call
= gsi_stmt (*iter
);
1959 gcc_checking_assert (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
));
1961 tree callee
= gimple_call_fndecl (call
);
1962 location_t loc
= gimple_location (call
);
/* strlen is handled by its own dedicated routine.  */
1964 if (DECL_FUNCTION_CODE (callee
) == BUILT_IN_STRLEN
)
1965 iter_advanced_p
= instrument_strlen_call (iter
);
/* Otherwise decompose the call into up to two source refs and one
   destination ref, each possibly with a length.  */
1968 asan_mem_ref src0
, src1
, dest
;
1969 asan_mem_ref_init (&src0
, NULL
, 1);
1970 asan_mem_ref_init (&src1
, NULL
, 1);
1971 asan_mem_ref_init (&dest
, NULL
, 1);
1973 tree src0_len
= NULL_TREE
, src1_len
= NULL_TREE
, dest_len
= NULL_TREE
;
1974 bool src0_is_store
= false, src1_is_store
= false,
1975 dest_is_store
= false, dest_is_deref
= false;
1977 if (get_mem_refs_of_builtin_call (call
,
1978 &src0
, &src0_len
, &src0_is_store
,
1979 &src1
, &src1_len
, &src1_is_store
,
1980 &dest
, &dest_len
, &dest_is_store
,
/* Plain dereference destination: scalar-style instrumentation.  */
1985 instrument_derefs (iter
, dest
.start
, loc
, dest_is_store
);
1987 iter_advanced_p
= true;
/* Region-style: instrument each present (start, len) pair, then
   re-anchor the iterator on the original call.  */
1989 else if (src0_len
|| src1_len
|| dest_len
)
1991 if (src0
.start
!= NULL_TREE
)
1992 instrument_mem_region_access (src0
.start
, src0_len
,
1993 iter
, loc
, /*is_store=*/false);
1994 if (src1
.start
!= NULL_TREE
)
1995 instrument_mem_region_access (src1
.start
, src1_len
,
1996 iter
, loc
, /*is_store=*/false);
1997 if (dest
.start
!= NULL_TREE
)
1998 instrument_mem_region_access (dest
.start
, dest_len
,
1999 iter
, loc
, /*is_store=*/true);
2000 *iter
= gsi_for_stmt (call
);
2002 iter_advanced_p
= true;
2006 return iter_advanced_p
;
2009 /* Instrument the assignment statement ITER if it is subject to
2010 instrumentation. Return TRUE iff instrumentation actually
2011 happened. In that case, the iterator ITER is advanced to the next
2012 logical expression following the one initially pointed to by ITER,
2013 and the relevant memory reference that which access has been
2014 instrumented is added to the memory references hash table. */
/* NOTE(review): lossy extraction — return type, braces, the is_store
   argument lines of the two instrument_derefs calls (embedded 2032, 2042
   missing) and the hash-table update / gsi_next around 2046-2049 were
   dropped; tokens below are byte-for-byte.  Contract per the comment
   above: instrument a single-rhs assignment's store and/or load side.  */
2017 maybe_instrument_assignment (gimple_stmt_iterator
*iter
)
2019 gimple s
= gsi_stmt (*iter
);
2021 gcc_assert (gimple_assign_single_p (s
));
2023 tree ref_expr
= NULL_TREE
;
2024 bool is_store
, is_instrumented
= false;
/* LHS that writes memory.  */
2026 if (gimple_store_p (s
))
2028 ref_expr
= gimple_assign_lhs (s
);
2030 instrument_derefs (iter
, ref_expr
,
2031 gimple_location (s
),
2033 is_instrumented
= true;
/* RHS that reads memory (both branches may fire for mem-to-mem).  */
2036 if (gimple_assign_load_p (s
))
2038 ref_expr
= gimple_assign_rhs1 (s
);
2040 instrument_derefs (iter
, ref_expr
,
2041 gimple_location (s
),
2043 is_instrumented
= true;
/* On success the dropped lines presumably advance *ITER past S.  */
2046 if (is_instrumented
)
2049 return is_instrumented
;
2052 /* Instrument the function call pointed to by the iterator ITER, if it
2053 is subject to instrumentation. At the moment, the only function
2054 calls that are instrumented are some built-in functions that access
2055 memory. Look at instrument_builtin_call to learn more.
2057 Upon completion return TRUE iff *ITER was advanced to the statement
2058 following the one it was originally pointing to. */
/* NOTE(review): lossy extraction — return type, braces, `return true;`
   after the builtin branch, additional switch cases/`default`, and the
   trailing `return false;` were dropped; tokens below are byte-for-byte.
   Contract per the comment above: instrument the call at *ITER; also
   flag noreturn calls so the runtime can unpoison the stack.  */
2061 maybe_instrument_call (gimple_stmt_iterator
*iter
)
2063 gimple stmt
= gsi_stmt (*iter
);
2064 bool is_builtin
= gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
);
/* Memory-accessing builtins are handled (and may advance *ITER).  */
2066 if (is_builtin
&& instrument_builtin_call (iter
))
/* Noreturn calls never come back, so the frame's shadow poisoning
   would leak; insert __asan_handle_no_return before them — except for
   the benign cases listed in the (partially dropped) switch.  */
2069 if (gimple_call_noreturn_p (stmt
))
2073 tree callee
= gimple_call_fndecl (stmt
);
2074 switch (DECL_FUNCTION_CODE (callee
))
2076 case BUILT_IN_UNREACHABLE
:
2078 /* Don't instrument these. */
2084 tree decl
= builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN
);
2085 gimple g
= gimple_build_call (decl
, 0);
2086 gimple_set_location (g
, gimple_location (stmt
));
2087 gsi_insert_before (iter
, g
, GSI_SAME_STMT
);
2092 /* Walk each instruction of all basic block and instrument those that
2093 represent memory references: loads, stores, or function calls.
2094 In a given basic block, this function avoids instrumenting memory
2095 references that have already been instrumented. */
/* NOTE(review): lossy extraction — return type, braces, a `break;` in
   the predecessor-skipping loop, `gsi_next (&i);`, and the
   `last_bb = bb;` bookkeeping were dropped; tokens below are
   byte-for-byte.  Contract per the comment above: walk every statement
   of every BB and instrument memory references, deduplicating within
   extended basic blocks.  */
2098 transform_statements (void)
2100 basic_block bb
, last_bb
= NULL
;
2101 gimple_stmt_iterator i
;
/* BBs created by this pass get indices >= this; they are skipped.  */
2102 int saved_last_basic_block
= last_basic_block_for_fn (cfun
);
2104 FOR_EACH_BB_FN (bb
, cfun
)
2106 basic_block prev_bb
= bb
;
2108 if (bb
->index
>= saved_last_basic_block
) continue;
2110 /* Flush the mem ref hash table, if current bb doesn't have
2111 exactly one predecessor, or if that predecessor (skipping
2112 over asan created basic blocks) isn't the last processed
2113 basic block. Thus we effectively flush on extended basic
2114 block boundaries. */
2115 while (single_pred_p (prev_bb
))
2117 prev_bb
= single_pred (prev_bb
);
2118 if (prev_bb
->index
< saved_last_basic_block
)
2121 if (prev_bb
!= last_bb
)
2122 empty_mem_ref_hash_table ();
/* Iterate manually: the helpers advance I themselves on success.  */
2125 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
);)
2127 gimple s
= gsi_stmt (i
);
2129 if (has_stmt_been_instrumented_p (s
))
2131 else if (gimple_assign_single_p (s
)
2132 && !gimple_clobber_p (s
)
2133 && maybe_instrument_assignment (&i
))
2134 /* Nothing to do as maybe_instrument_assignment advanced
2136 else if (is_gimple_call (s
) && maybe_instrument_call (&i
))
2137 /* Nothing to do as maybe_instrument_call
2138 advanced the iterator I. */;
2141 /* No instrumentation happened.
2143 If the current instruction is a function call that
2144 might free something, let's forget about the memory
2145 references that got instrumented. Otherwise we might
2146 miss some instrumentation opportunities. */
2147 if (is_gimple_call (s
) && !nonfreeing_call_p (s
))
2148 empty_mem_ref_hash_table ();
2154 free_mem_ref_resources ();
2158 __asan_before_dynamic_init (module_name)
2160 __asan_after_dynamic_init ()
/* NOTE(review): lossy extraction — return type, braces, the `if (!after_p)`
   guard around building the module name, and fold_convert's second
   argument lines (2179-2181) were dropped; tokens below are
   byte-for-byte.  Per the header fragment above: builds a call to
   __asan_before_dynamic_init (module_name) or __asan_after_dynamic_init ().  */
2164 asan_dynamic_init_call (bool after_p
)
2166 tree fn
= builtin_decl_implicit (after_p
2167 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2168 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT
)
;
2169 tree module_name_cst
= NULL_TREE
;
/* Only the "before" variant takes the module name (built from the main
   input filename).  */
2172 pretty_printer module_name_pp
;
2173 pp_string (&module_name_pp
, main_input_filename
);
2175 if (shadow_ptr_types
[0] == NULL_TREE
)
2176 asan_init_shadow_ptr_types ();
2177 module_name_cst
= asan_pp_string (&module_name_pp
);
2178 module_name_cst
= fold_convert (const_ptr_type_node
,
2182 return build_call_expr (fn
, after_p
? 0 : 1, module_name_cst
);
/* Mirror of the libasan runtime's global descriptor layout; the
   instrumentation must emit records field-for-field compatible with
   this struct (see compiler-rt asan_interface_internal.h).  */
2186 struct __asan_global
2190 uptr __size_with_redzone;
2192 const void *__module_name;
2193 uptr __has_dynamic_init;
2194 __asan_global_source_location *__location;
/* Build and return the RECORD_TYPE that matches __asan_global above:
   seven fields, where __beg (i==0) and __name (i==3) are const void*
   and the rest are pointer-sized integers.  */
2198 asan_global_struct (void)
2200 static const char *field_names
[7]
2201 = { "__beg", "__size", "__size_with_redzone",
2202 "__name", "__module_name", "__has_dynamic_init", "__location"};
2203 tree fields
[7], ret
;
2206 ret
= make_node (RECORD_TYPE
)
;
2207 for (i
= 0; i
< 7; i
++)
2210 = build_decl (UNKNOWN_LOCATION
, FIELD_DECL
,
2211 get_identifier (field_names
[i
]),
/* Fields 0 (__beg) and 3 (__name) are pointers; others uptr-sized.  */
2212 (i
== 0 || i
== 3) ? const_ptr_type_node
2213 : pointer_sized_int_node
);
2214 DECL_CONTEXT (fields
[i
]) = ret
;
/* Chain each field to its predecessor to form the member list.  */
2216 DECL_CHAIN (fields
[i
- 1]) = fields
[i
];
2218 TYPE_FIELDS (ret
) = fields
[0];
2219 TYPE_NAME (ret
) = get_identifier ("__asan_global");
2224 /* Append description of a single global DECL into vector V.
2225 TYPE is __asan_global struct type as returned by asan_global_struct. */
/* Builds one __asan_global constructor element for DECL: address,
   size, redzone-padded size, name string, module name, dynamic-init
   flag and (when available) a source-location record.  NOTE(review):
   garbled extraction -- some lines (braces, a few operands) are
   missing; comments only added, code untouched.  */
2228 asan_add_global (tree decl
, tree type
, vec
<constructor_elt
, va_gc
> *v
)
/* UPTR is the pointer-sized integer field type of TYPE (field #2).  */
2230 tree init
, uptr
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
)));
2231 unsigned HOST_WIDE_INT size
;
2232 tree str_cst
, module_name_cst
, refdecl
= decl
;
2233 vec
<constructor_elt
, va_gc
> *vinner
= NULL
;
2235 pretty_printer asan_pp
, module_name_pp
;
/* Build the human-readable name of the global ("<unknown>" if it has
   no DECL_NAME) for asan's error reports.  */
2237 if (DECL_NAME (decl
))
2238 pp_tree_identifier (&asan_pp
, DECL_NAME (decl
));
2240 pp_string (&asan_pp
, "<unknown>");
2241 str_cst
= asan_pp_string (&asan_pp
);
2243 pp_string (&module_name_pp
, main_input_filename
);
2244 module_name_cst
= asan_pp_string (&module_name_pp
);
/* Some targets need a local .LASAN* alias so the descriptor does not
   create a copy-relocation reference to the global itself.  */
2246 if (asan_needs_local_alias (decl
))
2249 ASM_GENERATE_INTERNAL_LABEL (buf
, "LASAN", vec_safe_length (v
) + 1);
2250 refdecl
= build_decl (DECL_SOURCE_LOCATION (decl
),
2251 VAR_DECL
, get_identifier (buf
), TREE_TYPE (decl
));
/* The alias mirrors the attributes of the aliased global.  */
2252 TREE_ADDRESSABLE (refdecl
) = TREE_ADDRESSABLE (decl
);
2253 TREE_READONLY (refdecl
) = TREE_READONLY (decl
);
2254 TREE_THIS_VOLATILE (refdecl
) = TREE_THIS_VOLATILE (decl
);
2255 DECL_GIMPLE_REG_P (refdecl
) = DECL_GIMPLE_REG_P (decl
);
2256 DECL_ARTIFICIAL (refdecl
) = DECL_ARTIFICIAL (decl
);
2257 DECL_IGNORED_P (refdecl
) = DECL_IGNORED_P (decl
);
2258 TREE_STATIC (refdecl
) = 1;
2259 TREE_PUBLIC (refdecl
) = 0;
2260 TREE_USED (refdecl
) = 1;
2261 assemble_alias (refdecl
, DECL_ASSEMBLER_NAME (decl
));
/* Field __beg: address of the (possibly aliased) global.  */
2264 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
,
2265 fold_convert (const_ptr_type_node
,
2266 build_fold_addr_expr (refdecl
)));
/* Field __size, then __size_with_redzone (size + redzone padding).  */
2267 size
= tree_to_uhwi (DECL_SIZE_UNIT (decl
));
2268 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
, build_int_cst (uptr
, size
));
2269 size
+= asan_red_zone_size (size
);
2270 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
, build_int_cst (uptr
, size
));
/* Fields __name and __module_name.  */
2271 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
,
2272 fold_convert (const_ptr_type_node
, str_cst
));
2273 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
,
2274 fold_convert (const_ptr_type_node
, module_name_cst
));
/* Field __has_dynamic_init from the varpool node, if one exists.  */
2275 varpool_node
*vnode
= varpool_node::get (decl
);
2276 int has_dynamic_init
= vnode
? vnode
->dynamically_initialized
: 0;
2277 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
,
2278 build_int_cst (uptr
, has_dynamic_init
));
/* Field __location: emit a static source-location record when the
   decl has a known file, else store 0.  */
2279 tree locptr
= NULL_TREE
;
2280 location_t loc
= DECL_SOURCE_LOCATION (decl
);
2281 expanded_location xloc
= expand_location (loc
);
2282 if (xloc
.file
!= NULL
)
2284 static int lasanloccnt
= 0;
2286 ASM_GENERATE_INTERNAL_LABEL (buf
, "LASANLOC", ++lasanloccnt
);
2287 tree var
= build_decl (UNKNOWN_LOCATION
, VAR_DECL
, get_identifier (buf
),
/* Reuses ubsan's {filename, line, column} location record type.  */
2288 ubsan_get_source_location_type ());
2289 TREE_STATIC (var
) = 1;
2290 TREE_PUBLIC (var
) = 0;
2291 DECL_ARTIFICIAL (var
) = 1;
2292 DECL_IGNORED_P (var
) = 1;
2293 pretty_printer filename_pp
;
2294 pp_string (&filename_pp
, xloc
.file
);
2295 tree str
= asan_pp_string (&filename_pp
);
2296 tree ctor
= build_constructor_va (TREE_TYPE (var
), 3,
2297 NULL_TREE
, str
, NULL_TREE
,
2298 build_int_cst (unsigned_type_node
,
2299 xloc
.line
), NULL_TREE
,
2300 build_int_cst (unsigned_type_node
,
2302 TREE_CONSTANT (ctor
) = 1;
2303 TREE_STATIC (ctor
) = 1;
2304 DECL_INITIAL (var
) = ctor
;
2305 varpool_node::finalize_decl (var
);
2306 locptr
= fold_convert (uptr
, build_fold_addr_expr (var
));
2309 locptr
= build_int_cst (uptr
, 0);
2310 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
, locptr
);
/* Wrap the field list into a __asan_global constructor and append it
   to the caller's vector V.  */
2311 init
= build_constructor (type
, vinner
);
2312 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, init
);
2315 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
/* Registers every builtin listed in sanitizer.def via the
   DEF_SANITIZER_BUILTIN macro below; a no-op if the front end already
   did so (detected by BUILT_IN_ASAN_INIT being present).  */
2317 initialize_sanitizer_builtins (void)
2321 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT
))
/* Function-type nodes named after their signature, used by the
   DEF_SANITIZER_BUILTIN expansions from sanitizer.def.  */
2324 tree BT_FN_VOID
= build_function_type_list (void_type_node
, NULL_TREE
);
2326 = build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
2327 tree BT_FN_VOID_CONST_PTR
2328 = build_function_type_list (void_type_node
, const_ptr_type_node
, NULL_TREE
);
2329 tree BT_FN_VOID_PTR_PTR
2330 = build_function_type_list (void_type_node
, ptr_type_node
,
2331 ptr_type_node
, NULL_TREE
);
2332 tree BT_FN_VOID_PTR_PTR_PTR
2333 = build_function_type_list (void_type_node
, ptr_type_node
,
2334 ptr_type_node
, ptr_type_node
, NULL_TREE
);
2335 tree BT_FN_VOID_PTRMODE
2336 = build_function_type_list (void_type_node
, ptr_type_node
,
2337 pointer_sized_int_node
, NULL_TREE
);
2339 = build_function_type_list (void_type_node
, integer_type_node
, NULL_TREE
);
/* Families of signatures indexed by access size: index i corresponds
   to a (1 << i)-byte access, i.e. 1, 2, 4, 8 and 16 bytes.  */
2340 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT
[5];
2341 tree BT_FN_IX_CONST_VPTR_INT
[5];
2342 tree BT_FN_IX_VPTR_IX_INT
[5];
2343 tree BT_FN_VOID_VPTR_IX_INT
[5];
/* vptr / cvptr: (const) volatile void * parameter types.  */
2345 = build_pointer_type (build_qualified_type (void_type_node
,
2346 TYPE_QUAL_VOLATILE
));
2348 = build_pointer_type (build_qualified_type (void_type_node
,
2352 = lang_hooks
.types
.type_for_size (BOOL_TYPE_SIZE
, 1);
2354 for (i
= 0; i
< 5; i
++)
/* ix: unsigned integer type exactly (1 << i) bytes wide.  */
2356 tree ix
= build_nonstandard_integer_type (BITS_PER_UNIT
* (1 << i
), 1);
2357 BT_FN_BOOL_VPTR_PTR_IX_INT_INT
[i
]
2358 = build_function_type_list (boolt
, vptr
, ptr_type_node
, ix
,
2359 integer_type_node
, integer_type_node
,
2361 BT_FN_IX_CONST_VPTR_INT
[i
]
2362 = build_function_type_list (ix
, cvptr
, integer_type_node
, NULL_TREE
);
2363 BT_FN_IX_VPTR_IX_INT
[i
]
2364 = build_function_type_list (ix
, vptr
, ix
, integer_type_node
,
2366 BT_FN_VOID_VPTR_IX_INT
[i
]
2367 = build_function_type_list (void_type_node
, vptr
, ix
,
2368 integer_type_node
, NULL_TREE
);
/* Aliases so sanitizer.def can name each sized variant directly.  */
2370 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2371 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2372 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2373 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2374 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2375 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2376 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2377 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2378 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2379 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2380 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2381 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2382 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2383 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2384 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2385 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2386 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2387 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2388 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2389 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
/* Map sanitizer.def attribute names onto ECF_* call flags; the COLD
   variants deliberately omit ECF_COLD, which this GCC lacks.  */
2390 #undef ATTR_NOTHROW_LEAF_LIST
2391 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2392 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2393 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2394 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2395 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2396 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2397 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2398 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2399 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2400 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2401 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2402 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2403 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2404 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
/* Each sanitizer.def entry expands to: create the builtin, set its
   call flags, and register it in the implicit-builtin table.  */
2405 #undef DEF_SANITIZER_BUILTIN
2406 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2407 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2408 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2409 set_call_expr_flags (decl, ATTRS); \
2410 set_builtin_decl (ENUM, decl, true);
2412 #include "sanitizer.def"
2414 #undef DEF_SANITIZER_BUILTIN
2417 /* Called via htab_traverse. Count number of emitted
2418 STRING_CSTs in the constant hash table. */
/* Traversal callback: bumps *DATA for each emitted, asan-protected
   STRING_CST constant.  NOTE(review): the increment and return lines
   are missing from this garbled extraction.  */
2421 count_string_csts (constant_descriptor_tree
**slot
,
2422 unsigned HOST_WIDE_INT
*data
)
2424 struct constant_descriptor_tree
*desc
= *slot
;
/* Count only string constants that were actually written to asm
   output and that asan decided to protect with redzones.  */
2425 if (TREE_CODE (desc
->value
) == STRING_CST
2426 && TREE_ASM_WRITTEN (desc
->value
)
2427 && asan_protect_global (desc
->value
))
2432 /* Helper structure to pass two parameters to
/* Bundles the __asan_global struct type and the output constructor
   vector so they fit through hash_table::traverse's single argument.  */
2435 struct asan_add_string_csts_data
2438 vec
<constructor_elt
, va_gc
> *v
;
2441 /* Called via hash_table::traverse. Call asan_add_global
2442 on emitted STRING_CSTs from the constant hash table. */
2445 add_string_csts (constant_descriptor_tree
**slot
,
2446 asan_add_string_csts_data
*aascd
)
2448 struct constant_descriptor_tree
*desc
= *slot
;
/* Same filter as count_string_csts: emitted, protected strings.  */
2449 if (TREE_CODE (desc
->value
) == STRING_CST
2450 && TREE_ASM_WRITTEN (desc
->value
)
2451 && asan_protect_global (desc
->value
))
/* The decl behind the constant is recovered from its SYMBOL_REF rtl.  */
2453 asan_add_global (SYMBOL_REF_DECL (XEXP (desc
->rtl
, 0)),
2454 aascd
->type
, aascd
->v
);
2459 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2460 invoke ggc_collect. */
2461 static GTY(()) tree asan_ctor_statements
;
2463 /* Module-level instrumentation.
2464 - Insert __asan_init_vN() into the list of CTORs.
2465 - TODO: insert redzones around globals.
/* Emit per-module asan bookkeeping at end of compilation: the
   __asan_init ctor call, the .LASAN0 array of __asan_global records
   for protected globals/strings, and the matching register/unregister
   ctor/dtor pair.  NOTE(review): garbled extraction; missing lines
   (braces, some call arguments) are not reconstructed here.  */
2469 asan_finish_file (void)
2471 varpool_node
*vnode
;
2472 unsigned HOST_WIDE_INT gcount
= 0;
2474 if (shadow_ptr_types
[0] == NULL_TREE
)
2475 asan_init_shadow_ptr_types ();
2476 /* Avoid instrumenting code in the asan ctors/dtors.
2477 We don't need to insert padding after the description strings,
2478 nor after .LASAN* array. */
2479 flag_sanitize
&= ~SANITIZE_ADDRESS
;
2481 if (flag_sanitize
& SANITIZE_USER_ADDRESS
)
2483 tree fn
= builtin_decl_implicit (BUILT_IN_ASAN_INIT
);
2484 append_to_statement_list (build_call_expr (fn
, 0), &asan_ctor_statements
);
/* First pass: count protected globals (plus protected emitted string
   constants via count_string_csts) to size the descriptor array.  */
2486 FOR_EACH_DEFINED_VARIABLE (vnode
)
2487 if (TREE_ASM_WRITTEN (vnode
->decl
)
2488 && asan_protect_global (vnode
->decl
))
2490 hash_table
<tree_descriptor_hasher
> *const_desc_htab
= constant_pool_htab ();
2491 const_desc_htab
->traverse
<unsigned HOST_WIDE_INT
*, count_string_csts
>
2495 tree type
= asan_global_struct (), var
, ctor
;
2496 tree dtor_statements
= NULL_TREE
;
2497 vec
<constructor_elt
, va_gc
> *v
;
/* Build the static ".LASAN0" array of GCOUNT __asan_global records.  */
2500 type
= build_array_type_nelts (type
, gcount
);
2501 ASM_GENERATE_INTERNAL_LABEL (buf
, "LASAN", 0);
2502 var
= build_decl (UNKNOWN_LOCATION
, VAR_DECL
, get_identifier (buf
),
2504 TREE_STATIC (var
) = 1;
2505 TREE_PUBLIC (var
) = 0;
2506 DECL_ARTIFICIAL (var
) = 1;
2507 DECL_IGNORED_P (var
) = 1;
2508 vec_alloc (v
, gcount
);
/* Second pass: fill in one descriptor per protected global, then per
   protected string constant.  */
2509 FOR_EACH_DEFINED_VARIABLE (vnode
)
2510 if (TREE_ASM_WRITTEN (vnode
->decl
)
2511 && asan_protect_global (vnode
->decl
))
2512 asan_add_global (vnode
->decl
, TREE_TYPE (type
), v
);
2513 struct asan_add_string_csts_data aascd
;
2514 aascd
.type
= TREE_TYPE (type
);
2516 const_desc_htab
->traverse
<asan_add_string_csts_data
*, add_string_csts
>
2518 ctor
= build_constructor (type
, v
);
2519 TREE_CONSTANT (ctor
) = 1;
2520 TREE_STATIC (ctor
) = 1;
2521 DECL_INITIAL (var
) = ctor
;
2522 varpool_node::finalize_decl (var
);
/* Register the array in a ctor, unregister it in a matching dtor.  */
2524 tree fn
= builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS
);
2525 tree gcount_tree
= build_int_cst (pointer_sized_int_node
, gcount
);
2526 append_to_statement_list (build_call_expr (fn
, 2,
2527 build_fold_addr_expr (var
),
2529 &asan_ctor_statements
);
2531 fn
= builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS
);
2532 append_to_statement_list (build_call_expr (fn
, 2,
2533 build_fold_addr_expr (var
),
/* Priority MAX_RESERVED_INIT_PRIORITY - 1 runs these around user
   static ctors/dtors.  */
2536 cgraph_build_static_cdtor ('D', dtor_statements
,
2537 MAX_RESERVED_INIT_PRIORITY
- 1);
2539 if (asan_ctor_statements
)
2540 cgraph_build_static_cdtor ('I', asan_ctor_statements
,
2541 MAX_RESERVED_INIT_PRIORITY
- 1);
/* Restore address sanitization disabled at the top of the function.  */
2542 flag_sanitize
|= SANITIZE_ADDRESS
;
2545 /* Expand the ASAN_{LOAD,STORE} builtins. */
/* Lowers an IFN_ASAN_CHECK internal call (args: flags, base, len,
   align) either into an __asan_{load,store}N runtime call (when
   USE_CALLS) or into inline shadow-memory checks plus a conditional
   branch to __asan_report_*.  Returns via *ITER positioned after the
   expansion.  NOTE(review): garbled extraction; braces and several
   operand lines are missing and were not reconstructed.  */
2548 asan_expand_check_ifn (gimple_stmt_iterator
*iter
, bool use_calls
)
2550 gimple g
= gsi_stmt (*iter
);
2551 location_t loc
= gimple_location (g
);
/* Decode the packed ASAN_CHECK_* flag word (call argument 0).  */
2553 HOST_WIDE_INT flags
= tree_to_shwi (gimple_call_arg (g
, 0));
2554 gcc_assert (flags
< ASAN_CHECK_LAST
);
2555 bool is_scalar_access
= (flags
& ASAN_CHECK_SCALAR_ACCESS
) != 0;
2556 bool is_store
= (flags
& ASAN_CHECK_STORE
) != 0;
2557 bool is_non_zero_len
= (flags
& ASAN_CHECK_NON_ZERO_LEN
) != 0;
2558 bool start_instrumented
= (flags
& ASAN_CHECK_START_INSTRUMENTED
) != 0;
2559 bool end_instrumented
= (flags
& ASAN_CHECK_END_INSTRUMENTED
) != 0;
2561 tree base
= gimple_call_arg (g
, 1);
2562 tree len
= gimple_call_arg (g
, 2);
2563 HOST_WIDE_INT align
= tree_to_shwi (gimple_call_arg (g
, 3));
/* -1 means "not a known-constant scalar size"; forces the generic
   first/last-byte checking path below.  */
2565 HOST_WIDE_INT size_in_bytes
2566 = is_scalar_access
&& tree_fits_shwi_p (len
) ? tree_to_shwi (len
) : -1;
2570 /* Instrument using callbacks. */
/* Cast BASE to a pointer-sized integer for the runtime call.  */
2572 = gimple_build_assign_with_ops (NOP_EXPR
,
2573 make_ssa_name (pointer_sized_int_node
,
2576 gimple_set_location (g
, loc
);
2577 gsi_insert_before (iter
, g
, GSI_SAME_STMT
);
2578 tree base_addr
= gimple_assign_lhs (g
);
2581 tree fun
= check_func (is_store
, size_in_bytes
, &nargs
);
/* Fixed-size checks take just the address ...  */
2583 g
= gimple_build_call (fun
, 1, base_addr
);
/* ... the _n variants also take the (cast) length.  */
2586 gcc_assert (nargs
== 2);
2587 g
= gimple_build_assign_with_ops (NOP_EXPR
,
2588 make_ssa_name (pointer_sized_int_node
,
2591 gimple_set_location (g
, loc
);
2592 gsi_insert_before (iter
, g
, GSI_SAME_STMT
);
2593 tree sz_arg
= gimple_assign_lhs (g
);
2594 g
= gimple_build_call (fun
, nargs
, base_addr
, sz_arg
);
2596 gimple_set_location (g
, loc
);
/* Replace the IFN_ASAN_CHECK with the runtime call in place.  */
2597 gsi_replace (iter
, g
, false);
/* Inline path from here on.  */
2601 HOST_WIDE_INT real_size_in_bytes
= size_in_bytes
== -1 ? 1 : size_in_bytes
;
/* shadow_ptr_types[1] is the 16-byte (short) shadow access type.  */
2603 tree shadow_ptr_type
= shadow_ptr_types
[real_size_in_bytes
== 16 ? 1 : 0];
2604 tree shadow_type
= TREE_TYPE (shadow_ptr_type
);
2606 gimple_stmt_iterator gsi
= *iter
;
2608 if (!is_non_zero_len
)
2610 /* So, the length of the memory area to asan-protect is
2611 non-constant. Let's guard the generated instrumentation code
2616 //asan instrumentation code goes here.
2618 // falltrough instructions, starting with *ITER. */
/* Guard: if (len != 0) { checks } -- skip checks for zero length.  */
2620 g
= gimple_build_cond (NE_EXPR
,
2622 build_int_cst (TREE_TYPE (len
), 0),
2623 NULL_TREE
, NULL_TREE
);
2624 gimple_set_location (g
, loc
);
2626 basic_block then_bb
, fallthrough_bb
;
2627 insert_if_then_before_iter (g
, iter
, /*then_more_likely_p=*/true,
2628 &then_bb
, &fallthrough_bb
);
2629 /* Note that fallthrough_bb starts with the statement that was
2630 pointed to by ITER. */
2632 /* The 'then block' of the 'if (len != 0) condition is where
2633 we'll generate the asan instrumentation code now. */
2634 gsi
= gsi_last_bb (then_bb
);
2637 /* Get an iterator on the point where we can add the condition
2638 statement for the instrumentation. */
2639 basic_block then_bb
, else_bb
;
2640 gsi
= create_cond_insert_point (&gsi
, /*before_p*/false,
2641 /*then_more_likely_p=*/false,
2642 /*create_then_fallthru_edge=*/false,
2646 g
= gimple_build_assign_with_ops (NOP_EXPR
,
2647 make_ssa_name (pointer_sized_int_node
,
2650 gimple_set_location (g
, loc
);
2651 gsi_insert_before (&gsi
, g
, GSI_NEW_STMT
);
2652 tree base_addr
= gimple_assign_lhs (g
);
/* >= 8-byte accesses: a nonzero shadow byte (or shadow short for 16)
   means a fault -- no slow-path arithmetic needed.  */
2655 if (real_size_in_bytes
>= 8)
2657 tree shadow
= build_shadow_mem_access (&gsi
, loc
, base_addr
,
2663 /* Slow path for 1, 2 and 4 byte accesses. */
2665 if (!start_instrumented
)
2667 /* Test (shadow != 0)
2668 & ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow). */
2669 tree shadow
= build_shadow_mem_access (&gsi
, loc
, base_addr
,
2671 gimple shadow_test
= build_assign (NE_EXPR
, shadow
, 0);
2672 gimple_seq seq
= NULL
;
2673 gimple_seq_add_stmt (&seq
, shadow_test
);
2674 /* Aligned (>= 8 bytes) can test just
2675 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
2679 gimple_seq_add_stmt (&seq
, build_assign (BIT_AND_EXPR
,
2681 gimple_seq_add_stmt (&seq
,
2682 build_type_cast (shadow_type
,
2683 gimple_seq_last (seq
)));
2684 if (real_size_in_bytes
> 1)
2685 gimple_seq_add_stmt (&seq
,
2686 build_assign (PLUS_EXPR
,
2687 gimple_seq_last (seq
),
2688 real_size_in_bytes
- 1));
2689 t
= gimple_assign_lhs (gimple_seq_last_stmt (seq
));
2692 t
= build_int_cst (shadow_type
, real_size_in_bytes
- 1);
2693 gimple_seq_add_stmt (&seq
, build_assign (GE_EXPR
, t
, shadow
));
2694 gimple_seq_add_stmt (&seq
, build_assign (BIT_AND_EXPR
, shadow_test
,
2695 gimple_seq_last (seq
)));
2696 t
= gimple_assign_lhs (gimple_seq_last (seq
));
2697 gimple_seq_set_location (seq
, loc
);
2698 gsi_insert_seq_after (&gsi
, seq
, GSI_CONTINUE_LINKING
);
2701 /* For non-constant, misaligned or otherwise weird access sizes,
2702 check first and last byte. */
2703 if (size_in_bytes
== -1 && !end_instrumented
)
/* base_end_addr = base_addr + (len - 1); check its shadow too.  */
2705 g
= gimple_build_assign_with_ops (MINUS_EXPR
,
2706 make_ssa_name (pointer_sized_int_node
, NULL
),
2708 build_int_cst (pointer_sized_int_node
, 1));
2709 gimple_set_location (g
, loc
);
2710 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
2711 tree last
= gimple_assign_lhs (g
);
2712 g
= gimple_build_assign_with_ops (PLUS_EXPR
,
2713 make_ssa_name (pointer_sized_int_node
, NULL
),
2716 gimple_set_location (g
, loc
);
2717 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
2718 tree base_end_addr
= gimple_assign_lhs (g
);
2720 tree shadow
= build_shadow_mem_access (&gsi
, loc
, base_end_addr
,
2722 gimple shadow_test
= build_assign (NE_EXPR
, shadow
, 0);
2723 gimple_seq seq
= NULL
;
2724 gimple_seq_add_stmt (&seq
, shadow_test
);
2725 gimple_seq_add_stmt (&seq
, build_assign (BIT_AND_EXPR
,
2727 gimple_seq_add_stmt (&seq
, build_type_cast (shadow_type
,
2728 gimple_seq_last (seq
)));
2729 gimple_seq_add_stmt (&seq
, build_assign (GE_EXPR
,
2730 gimple_seq_last (seq
),
/* OR the end-byte test into the start-byte test result T.  */
2732 gimple_seq_add_stmt (&seq
, build_assign (BIT_AND_EXPR
, shadow_test
,
2733 gimple_seq_last (seq
)));
2734 if (!start_instrumented
)
2735 gimple_seq_add_stmt (&seq
, build_assign (BIT_IOR_EXPR
, t
,
2736 gimple_seq_last (seq
)));
2737 t
= gimple_assign_lhs (gimple_seq_last (seq
));
2738 gimple_seq_set_location (seq
, loc
);
2739 gsi_insert_seq_after (&gsi
, seq
, GSI_CONTINUE_LINKING
);
/* Branch to the report block when the combined test T is nonzero.  */
2743 g
= gimple_build_cond (NE_EXPR
, t
, build_int_cst (TREE_TYPE (t
), 0),
2744 NULL_TREE
, NULL_TREE
);
2745 gimple_set_location (g
, loc
);
2746 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
2748 /* Generate call to the run-time library (e.g. __asan_report_load8). */
2749 gsi
= gsi_start_bb (then_bb
);
2751 tree fun
= report_error_func (is_store
, size_in_bytes
, &nargs
);
2752 g
= gimple_build_call (fun
, nargs
, base_addr
, len
);
2753 gimple_set_location (g
, loc
);
2754 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
/* Drop the IFN_ASAN_CHECK and resume scanning in the else block.  */
2756 gsi_remove (iter
, true);
2757 *iter
= gsi_start_bb (else_bb
);
2762 /* Instrument the current function. */
/* Pass entry point: ensure shadow pointer types exist, then add
   instrumentation to every memory-touching statement of cfun.  */
2765 asan_instrument (void)
2767 if (shadow_ptr_types
[0] == NULL_TREE
)
2768 asan_init_shadow_ptr_types ();
2769 transform_statements ();
/* Gate: run asan only when -fsanitize=address is active and the
   current function is not marked no_sanitize_address.  */
2776 return (flag_sanitize
& SANITIZE_ADDRESS
) != 0
2777 && !lookup_attribute ("no_sanitize_address",
2778 DECL_ATTRIBUTES (current_function_decl
));
/* Pass descriptor for the main asan GIMPLE pass (runs with
   optimization); requires SSA/CFG/EH-lowered IL, updates SSA after.  */
2783 const pass_data pass_data_asan
=
2785 GIMPLE_PASS
, /* type */
2787 OPTGROUP_NONE
, /* optinfo_flags */
2788 TV_NONE
, /* tv_id */
2789 ( PROP_ssa
| PROP_cfg
| PROP_gimple_leh
), /* properties_required */
2790 0, /* properties_provided */
2791 0, /* properties_destroyed */
2792 0, /* todo_flags_start */
2793 TODO_update_ssa
, /* todo_flags_finish */
2796 class pass_asan
: public gimple_opt_pass
2799 pass_asan (gcc::context
*ctxt
)
2800 : gimple_opt_pass (pass_data_asan
, ctxt
)
2803 /* opt_pass methods: */
/* clone allows the pass to be scheduled more than once.  */
2804 opt_pass
* clone () { return new pass_asan (m_ctxt
); }
2805 virtual bool gate (function
*) { return gate_asan (); }
2806 virtual unsigned int execute (function
*) { return asan_instrument (); }
2808 }; // class pass_asan
/* Factory used by the pass manager (passes.def).  */
2813 make_pass_asan (gcc::context
*ctxt
)
2815 return new pass_asan (ctxt
);
/* Pass descriptor for the -O0 variant of asan; identical requirements
   to pass_asan but gated on !optimize so it runs in the unoptimized
   pipeline position instead.  */
2820 const pass_data pass_data_asan_O0
=
2822 GIMPLE_PASS
, /* type */
2824 OPTGROUP_NONE
, /* optinfo_flags */
2825 TV_NONE
, /* tv_id */
2826 ( PROP_ssa
| PROP_cfg
| PROP_gimple_leh
), /* properties_required */
2827 0, /* properties_provided */
2828 0, /* properties_destroyed */
2829 0, /* todo_flags_start */
2830 TODO_update_ssa
, /* todo_flags_finish */
2833 class pass_asan_O0
: public gimple_opt_pass
2836 pass_asan_O0 (gcc::context
*ctxt
)
2837 : gimple_opt_pass (pass_data_asan_O0
, ctxt
)
2840 /* opt_pass methods: */
/* Only when not optimizing -- the optimizing pipeline runs pass_asan.  */
2841 virtual bool gate (function
*) { return !optimize
&& gate_asan (); }
2842 virtual unsigned int execute (function
*) { return asan_instrument (); }
2844 }; // class pass_asan_O0
/* Factory used by the pass manager (passes.def).  */
2849 make_pass_asan_O0 (gcc::context
*ctxt
)
2851 return new pass_asan_O0 (ctxt
);
2854 /* Perform optimization of sanitize functions. */
/* The sanopt pass lowers the sanitizer internal functions
   (IFN_UBSAN_*, IFN_ASAN_CHECK) left in the IL into real checks or
   runtime calls.  Gated on any -fsanitize flag being set.  */
2858 const pass_data pass_data_sanopt
=
2860 GIMPLE_PASS
, /* type */
2861 "sanopt", /* name */
2862 OPTGROUP_NONE
, /* optinfo_flags */
2863 TV_NONE
, /* tv_id */
2864 ( PROP_ssa
| PROP_cfg
| PROP_gimple_leh
), /* properties_required */
2865 0, /* properties_provided */
2866 0, /* properties_destroyed */
2867 0, /* todo_flags_start */
2868 TODO_update_ssa
, /* todo_flags_finish */
2871 class pass_sanopt
: public gimple_opt_pass
2874 pass_sanopt (gcc::context
*ctxt
)
2875 : gimple_opt_pass (pass_data_sanopt
, ctxt
)
2878 /* opt_pass methods: */
2879 virtual bool gate (function
*) { return flag_sanitize
; }
2880 virtual unsigned int execute (function
*);
2882 }; // class pass_sanopt
/* NOTE(review): garbled extraction -- switch braces, break statements
   and the return are missing from view; comments only added.  */
2885 pass_sanopt::execute (function
*fun
)
/* First pass over the IL: count IFN_ASAN_CHECK calls to decide
   between inline checks and runtime callbacks.  */
2889 int asan_num_accesses
= 0;
2890 if (flag_sanitize
& SANITIZE_ADDRESS
)
2892 gimple_stmt_iterator gsi
;
2893 FOR_EACH_BB_FN (bb
, fun
)
2894 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2896 gimple stmt
= gsi_stmt (gsi
);
2897 if (is_gimple_call (stmt
) && gimple_call_internal_p (stmt
)
2898 && gimple_call_internal_fn (stmt
) == IFN_ASAN_CHECK
)
2899 ++asan_num_accesses
;
/* Use __asan_load/store callbacks when the per-function access count
   reaches the --param threshold (INT_MAX disables callbacks).  */
2903 bool use_calls
= ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD
< INT_MAX
2904 && asan_num_accesses
>= ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD
;
/* Second pass: expand each sanitizer internal call.  The ubsan_/asan_
   expanders advance or invalidate GSI themselves, signalled through
   NO_NEXT.  */
2906 FOR_EACH_BB_FN (bb
, fun
)
2908 gimple_stmt_iterator gsi
;
2909 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); )
2911 gimple stmt
= gsi_stmt (gsi
);
2912 bool no_next
= false;
2914 if (!is_gimple_call (stmt
))
2920 if (gimple_call_internal_p (stmt
))
2922 enum internal_fn ifn
= gimple_call_internal_fn (stmt
);
2925 case IFN_UBSAN_NULL
:
2926 no_next
= ubsan_expand_null_ifn (&gsi
);
2928 case IFN_UBSAN_BOUNDS
:
2929 no_next
= ubsan_expand_bounds_ifn (&gsi
);
2931 case IFN_UBSAN_OBJECT_SIZE
:
2932 no_next
= ubsan_expand_objsize_ifn (&gsi
);
2934 case IFN_ASAN_CHECK
:
2935 no_next
= asan_expand_check_ifn (&gsi
, use_calls
);
2942 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2944 fprintf (dump_file
, "Optimized\n ");
2945 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
2946 fprintf (dump_file
, "\n");
/* Factory used by the pass manager (passes.def).  */
2959 make_pass_sanopt (gcc::context
*ctxt
)
2961 return new pass_sanopt (ctxt
);
2964 #include "gt-asan.h"