1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2015 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
32 #include "fold-const.h"
34 #include "internal-fn.h"
36 #include "gimple-iterator.h"
39 #include "stor-layout.h"
40 #include "tree-iterator.h"
42 #include "stringpool.h"
43 #include "tree-ssanames.h"
44 #include "tree-pass.h"
46 #include "gimple-pretty-print.h"
49 #include "insn-config.h"
56 #include "insn-codes.h"
60 #include "langhooks.h"
61 #include "alloc-pool.h"
63 #include "gimple-builder.h"
69 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
70 with <2x slowdown on average.
72 The tool consists of two parts:
73 instrumentation module (this file) and a run-time library.
74 The instrumentation module adds a run-time check before every memory insn.
75 For a 8- or 16- byte load accessing address X:
76 ShadowAddr = (X >> 3) + Offset
77 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
79 __asan_report_load8(X);
80 For a load of N bytes (N=1, 2 or 4) from address X:
81 ShadowAddr = (X >> 3) + Offset
82 ShadowValue = *(char*)ShadowAddr;
84 if ((X & 7) + N - 1 > ShadowValue)
85 __asan_report_loadN(X);
86 Stores are instrumented similarly, but using __asan_report_storeN functions.
87 A call to __asan_init_vN() is inserted into the list of module CTORs.
88 N is the version number of the AddressSanitizer API. The changes between the
89 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
91 The run-time library redefines malloc (so that redzone are inserted around
92 the allocated memory) and free (so that reuse of free-ed memory is delayed),
93 provides __asan_report* and __asan_init_vN functions.
96 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
98 The current implementation supports detection of out-of-bounds and
99 use-after-free in the heap, on the stack and for global variables.
101 [Protection of stack variables]
103 To understand how detection of out-of-bounds and use-after-free works
104 for stack variables, lets look at this example on x86_64 where the
105 stack grows downward:
119 For this function, the stack protected by asan will be organized as
120 follows, from the top of the stack to the bottom:
122 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
124 Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
125 the next slot be 32 bytes aligned; this one is called Partial
126 Redzone; this 32 bytes alignment is an asan constraint]
128 Slot 3/ [24 bytes for variable 'a']
130 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
132 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2]
134 Slot 6/ [8 bytes for variable 'b']
136 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
139 The 32 bytes of LEFT red zone at the bottom of the stack can be
142 1/ The first 8 bytes contain a magical asan number that is always
145 2/ The following 8 bytes contains a pointer to a string (to be
146 parsed at runtime by the runtime asan library), which format is
149 "<function-name> <space> <num-of-variables-on-the-stack>
150 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
151 <length-of-var-in-bytes> ){n} "
153 where '(...){n}' means the content inside the parenthesis occurs 'n'
154 times, with 'n' being the number of variables on the stack.
156 3/ The following 8 bytes contain the PC of the current function which
157 will be used by the run-time library to print an error message.
159 4/ The following 8 bytes are reserved for internal use by the run-time.
161 The shadow memory for that stack layout is going to look like this:
163 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
164 The F1 byte pattern is a magic number called
165 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
166 the memory for that shadow byte is part of the LEFT red zone
167 intended to seat at the bottom of the variables on the stack.
169 - content of shadow memory 8 bytes for slots 6 and 5:
170 0xF4F4F400. The F4 byte pattern is a magic number
171 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
172 memory region for this shadow byte is a PARTIAL red zone
173 intended to pad a variable A, so that the slot following
174 {A,padding} is 32 bytes aligned.
176 Note that the fact that the least significant byte of this
177 shadow memory content is 00 means that 8 bytes of its
178 corresponding memory (which corresponds to the memory of
179 variable 'b') is addressable.
181 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
182 The F2 byte pattern is a magic number called
183 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
184 region for this shadow byte is a MIDDLE red zone intended to
185 seat between two 32 aligned slots of {variable,padding}.
187 - content of shadow memory 8 bytes for slot 3 and 2:
188 0xF4000000. This represents the concatenation of
189 variable 'a' and the partial red zone following it, like what we
190 had for variable 'b'. The least significant 3 bytes being 00
191 means that the 3 bytes of variable 'a' are addressable.
193 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
194 The F3 byte pattern is a magic number called
195 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
196 region for this shadow byte is a RIGHT red zone intended to seat
197 at the top of the variables of the stack.
199 Note that the real variable layout is done in expand_used_vars in
200 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
201 stack variables as well as the different red zones, emits some
202 prologue code to populate the shadow memory as to poison (mark as
203 non-accessible) the regions of the red zones and mark the regions of
204 stack variables as accessible, and emit some epilogue code to
205 un-poison (mark as accessible) the regions of red zones right before
208 [Protection of global variables]
210 The basic idea is to insert a red zone between two global variables
211 and install a constructor function that calls the asan runtime to do
212 the populating of the relevant shadow memory regions at load time.
214 So the global variables are laid out as to insert a red zone between
215 them. The size of the red zones is so that each variable starts on a
218 Then a constructor function is installed so that, for each global
219 variable, it calls the runtime asan library function
220 __asan_register_globals with an instance of this type:
224 // Address of the beginning of the global variable.
227 // Initial size of the global variable.
230 // Size of the global variable + size of the red zone. This
231 // size is 32 bytes aligned.
232 uptr __size_with_redzone;
234 // Name of the global variable.
237 // Name of the module where the global variable is declared.
238 const void *__module_name;
240 // 1 if it has dynamic initialization, 0 otherwise.
241 uptr __has_dynamic_init;
243 // A pointer to struct that contains source location, could be NULL.
244 __asan_global_source_location *__location;
247 A destructor function that calls the runtime asan library function
248 _asan_unregister_globals is also installed. */
250 static unsigned HOST_WIDE_INT asan_shadow_offset_value
;
251 static bool asan_shadow_offset_computed
;
252 static vec
<char *> sanitized_sections
;
254 /* Sets shadow offset to value in string VAL. */
257 set_asan_shadow_offset (const char *val
)
262 #ifdef HAVE_LONG_LONG
263 asan_shadow_offset_value
= strtoull (val
, &endp
, 0);
265 asan_shadow_offset_value
= strtoul (val
, &endp
, 0);
267 if (!(*val
!= '\0' && *endp
== '\0' && errno
== 0))
270 asan_shadow_offset_computed
= true;
275 /* Set list of user-defined sections that need to be sanitized. */
278 set_sanitized_sections (const char *sections
)
282 FOR_EACH_VEC_ELT (sanitized_sections
, i
, pat
)
284 sanitized_sections
.truncate (0);
286 for (const char *s
= sections
; *s
; )
289 for (end
= s
; *end
&& *end
!= ','; ++end
);
290 size_t len
= end
- s
;
291 sanitized_sections
.safe_push (xstrndup (s
, len
));
292 s
= *end
? end
+ 1 : end
;
296 /* Checks whether section SEC should be sanitized. */
299 section_sanitized_p (const char *sec
)
303 FOR_EACH_VEC_ELT (sanitized_sections
, i
, pat
)
304 if (fnmatch (pat
, sec
, FNM_PERIOD
) == 0)
309 /* Returns Asan shadow offset. */
311 static unsigned HOST_WIDE_INT
312 asan_shadow_offset ()
314 if (!asan_shadow_offset_computed
)
316 asan_shadow_offset_computed
= true;
317 asan_shadow_offset_value
= targetm
.asan_shadow_offset ();
319 return asan_shadow_offset_value
;
322 alias_set_type asan_shadow_set
= -1;
324 /* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
325 alias set is used for all shadow memory accesses. */
326 static GTY(()) tree shadow_ptr_types
[2];
328 /* Decl for __asan_option_detect_stack_use_after_return. */
329 static GTY(()) tree asan_detect_stack_use_after_return
;
331 /* Various flags for Asan builtins. */
332 enum asan_check_flags
334 ASAN_CHECK_STORE
= 1 << 0,
335 ASAN_CHECK_SCALAR_ACCESS
= 1 << 1,
336 ASAN_CHECK_NON_ZERO_LEN
= 1 << 2,
337 ASAN_CHECK_LAST
= 1 << 3
340 /* Hashtable support for memory references used by gimple
343 /* This type represents a reference to a memory region. */
346 /* The expression of the beginning of the memory region. */
349 /* The size of the access. */
350 HOST_WIDE_INT access_size
;
352 /* Pool allocation new operator. */
353 inline void *operator new (size_t)
355 return pool
.allocate ();
358 /* Delete operator utilizing pool allocation. */
359 inline void operator delete (void *ptr
)
361 pool
.remove ((asan_mem_ref
*) ptr
);
364 /* Memory allocation pool. */
365 static pool_allocator
<asan_mem_ref
> pool
;
368 pool_allocator
<asan_mem_ref
> asan_mem_ref::pool ("asan_mem_ref", 10);
370 /* Initializes an instance of asan_mem_ref. */
373 asan_mem_ref_init (asan_mem_ref
*ref
, tree start
, HOST_WIDE_INT access_size
)
376 ref
->access_size
= access_size
;
379 /* Allocates memory for an instance of asan_mem_ref into the memory
380 pool returned by asan_mem_ref_get_alloc_pool and initialize it.
381 START is the address of (or the expression pointing to) the
382 beginning of memory reference. ACCESS_SIZE is the size of the
383 access to the referenced memory. */
386 asan_mem_ref_new (tree start
, HOST_WIDE_INT access_size
)
388 asan_mem_ref
*ref
= new asan_mem_ref
;
390 asan_mem_ref_init (ref
, start
, access_size
);
394 /* This builds and returns a pointer to the end of the memory region
395 that starts at START and of length LEN. */
398 asan_mem_ref_get_end (tree start
, tree len
)
400 if (len
== NULL_TREE
|| integer_zerop (len
))
403 if (!ptrofftype_p (len
))
404 len
= convert_to_ptrofftype (len
);
406 return fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (start
), start
, len
);
409 /* Return a tree expression that represents the end of the referenced
410 memory region. Beware that this function can actually build a new
414 asan_mem_ref_get_end (const asan_mem_ref
*ref
, tree len
)
416 return asan_mem_ref_get_end (ref
->start
, len
);
419 struct asan_mem_ref_hasher
: nofree_ptr_hash
<asan_mem_ref
>
421 static inline hashval_t
hash (const asan_mem_ref
*);
422 static inline bool equal (const asan_mem_ref
*, const asan_mem_ref
*);
425 /* Hash a memory reference. */
428 asan_mem_ref_hasher::hash (const asan_mem_ref
*mem_ref
)
430 return iterative_hash_expr (mem_ref
->start
, 0);
433 /* Compare two memory references. We accept the length of either
434 memory references to be NULL_TREE. */
437 asan_mem_ref_hasher::equal (const asan_mem_ref
*m1
,
438 const asan_mem_ref
*m2
)
440 return operand_equal_p (m1
->start
, m2
->start
, 0);
443 static hash_table
<asan_mem_ref_hasher
> *asan_mem_ref_ht
;
445 /* Returns a reference to the hash table containing memory references.
446 This function ensures that the hash table is created. Note that
447 this hash table is updated by the function
448 update_mem_ref_hash_table. */
450 static hash_table
<asan_mem_ref_hasher
> *
451 get_mem_ref_hash_table ()
453 if (!asan_mem_ref_ht
)
454 asan_mem_ref_ht
= new hash_table
<asan_mem_ref_hasher
> (10);
456 return asan_mem_ref_ht
;
459 /* Clear all entries from the memory references hash table. */
462 empty_mem_ref_hash_table ()
465 asan_mem_ref_ht
->empty ();
468 /* Free the memory references hash table. */
471 free_mem_ref_resources ()
473 delete asan_mem_ref_ht
;
474 asan_mem_ref_ht
= NULL
;
476 asan_mem_ref::pool
.release ();
479 /* Return true iff the memory reference REF has been instrumented. */
482 has_mem_ref_been_instrumented (tree ref
, HOST_WIDE_INT access_size
)
485 asan_mem_ref_init (&r
, ref
, access_size
);
487 asan_mem_ref
*saved_ref
= get_mem_ref_hash_table ()->find (&r
);
488 return saved_ref
&& saved_ref
->access_size
>= access_size
;
491 /* Return true iff the memory reference REF has been instrumented. */
494 has_mem_ref_been_instrumented (const asan_mem_ref
*ref
)
496 return has_mem_ref_been_instrumented (ref
->start
, ref
->access_size
);
499 /* Return true iff access to memory region starting at REF and of
500 length LEN has been instrumented. */
503 has_mem_ref_been_instrumented (const asan_mem_ref
*ref
, tree len
)
505 HOST_WIDE_INT size_in_bytes
506 = tree_fits_shwi_p (len
) ? tree_to_shwi (len
) : -1;
508 return size_in_bytes
!= -1
509 && has_mem_ref_been_instrumented (ref
->start
, size_in_bytes
);
512 /* Set REF to the memory reference present in a gimple assignment
513 ASSIGNMENT. Return true upon successful completion, false
517 get_mem_ref_of_assignment (const gassign
*assignment
,
521 gcc_assert (gimple_assign_single_p (assignment
));
523 if (gimple_store_p (assignment
)
524 && !gimple_clobber_p (assignment
))
526 ref
->start
= gimple_assign_lhs (assignment
);
527 *ref_is_store
= true;
529 else if (gimple_assign_load_p (assignment
))
531 ref
->start
= gimple_assign_rhs1 (assignment
);
532 *ref_is_store
= false;
537 ref
->access_size
= int_size_in_bytes (TREE_TYPE (ref
->start
));
541 /* Return the memory references contained in a gimple statement
542 representing a builtin call that has to do with memory access. */
545 get_mem_refs_of_builtin_call (const gcall
*call
,
558 gcc_checking_assert (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
));
560 tree callee
= gimple_call_fndecl (call
);
561 tree source0
= NULL_TREE
, source1
= NULL_TREE
,
562 dest
= NULL_TREE
, len
= NULL_TREE
;
563 bool is_store
= true, got_reference_p
= false;
564 HOST_WIDE_INT access_size
= 1;
566 *intercepted_p
= asan_intercepted_p ((DECL_FUNCTION_CODE (callee
)));
568 switch (DECL_FUNCTION_CODE (callee
))
570 /* (s, s, n) style memops. */
572 case BUILT_IN_MEMCMP
:
573 source0
= gimple_call_arg (call
, 0);
574 source1
= gimple_call_arg (call
, 1);
575 len
= gimple_call_arg (call
, 2);
578 /* (src, dest, n) style memops. */
580 source0
= gimple_call_arg (call
, 0);
581 dest
= gimple_call_arg (call
, 1);
582 len
= gimple_call_arg (call
, 2);
585 /* (dest, src, n) style memops. */
586 case BUILT_IN_MEMCPY
:
587 case BUILT_IN_MEMCPY_CHK
:
588 case BUILT_IN_MEMMOVE
:
589 case BUILT_IN_MEMMOVE_CHK
:
590 case BUILT_IN_MEMPCPY
:
591 case BUILT_IN_MEMPCPY_CHK
:
592 dest
= gimple_call_arg (call
, 0);
593 source0
= gimple_call_arg (call
, 1);
594 len
= gimple_call_arg (call
, 2);
597 /* (dest, n) style memops. */
599 dest
= gimple_call_arg (call
, 0);
600 len
= gimple_call_arg (call
, 1);
603 /* (dest, x, n) style memops*/
604 case BUILT_IN_MEMSET
:
605 case BUILT_IN_MEMSET_CHK
:
606 dest
= gimple_call_arg (call
, 0);
607 len
= gimple_call_arg (call
, 2);
610 case BUILT_IN_STRLEN
:
611 source0
= gimple_call_arg (call
, 0);
612 len
= gimple_call_lhs (call
);
615 /* And now the __atomic* and __sync builtins.
616 These are handled differently from the classical memory memory
617 access builtins above. */
619 case BUILT_IN_ATOMIC_LOAD_1
:
620 case BUILT_IN_ATOMIC_LOAD_2
:
621 case BUILT_IN_ATOMIC_LOAD_4
:
622 case BUILT_IN_ATOMIC_LOAD_8
:
623 case BUILT_IN_ATOMIC_LOAD_16
:
627 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
628 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
629 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
630 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
631 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
633 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
634 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
635 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
636 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
637 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
639 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
640 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
641 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
642 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
643 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
645 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
646 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
647 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
648 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
649 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
651 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
652 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
653 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
654 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
655 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
657 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
658 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
659 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
660 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
662 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
663 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
664 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
665 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
666 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
668 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
669 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
670 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
671 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
672 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
674 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
675 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
676 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
677 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
678 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
680 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
681 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
682 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
683 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
684 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
686 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
687 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
688 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
689 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
690 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
692 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
693 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
694 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
695 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
697 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
698 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
699 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
700 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
701 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
703 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
704 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
705 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
706 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
707 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
709 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
710 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
711 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
712 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
713 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
715 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
716 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
717 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
718 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
719 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
721 case BUILT_IN_ATOMIC_EXCHANGE_1
:
722 case BUILT_IN_ATOMIC_EXCHANGE_2
:
723 case BUILT_IN_ATOMIC_EXCHANGE_4
:
724 case BUILT_IN_ATOMIC_EXCHANGE_8
:
725 case BUILT_IN_ATOMIC_EXCHANGE_16
:
727 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
728 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
729 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
730 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
731 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
733 case BUILT_IN_ATOMIC_STORE_1
:
734 case BUILT_IN_ATOMIC_STORE_2
:
735 case BUILT_IN_ATOMIC_STORE_4
:
736 case BUILT_IN_ATOMIC_STORE_8
:
737 case BUILT_IN_ATOMIC_STORE_16
:
739 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
740 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
741 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
742 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
743 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
745 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
746 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
747 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
748 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
749 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
751 case BUILT_IN_ATOMIC_AND_FETCH_1
:
752 case BUILT_IN_ATOMIC_AND_FETCH_2
:
753 case BUILT_IN_ATOMIC_AND_FETCH_4
:
754 case BUILT_IN_ATOMIC_AND_FETCH_8
:
755 case BUILT_IN_ATOMIC_AND_FETCH_16
:
757 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
758 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
759 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
760 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
761 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
763 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
764 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
765 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
766 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
767 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
769 case BUILT_IN_ATOMIC_OR_FETCH_1
:
770 case BUILT_IN_ATOMIC_OR_FETCH_2
:
771 case BUILT_IN_ATOMIC_OR_FETCH_4
:
772 case BUILT_IN_ATOMIC_OR_FETCH_8
:
773 case BUILT_IN_ATOMIC_OR_FETCH_16
:
775 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
776 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
777 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
778 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
779 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
781 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
782 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
783 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
784 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
785 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
787 case BUILT_IN_ATOMIC_FETCH_AND_1
:
788 case BUILT_IN_ATOMIC_FETCH_AND_2
:
789 case BUILT_IN_ATOMIC_FETCH_AND_4
:
790 case BUILT_IN_ATOMIC_FETCH_AND_8
:
791 case BUILT_IN_ATOMIC_FETCH_AND_16
:
793 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
794 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
795 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
796 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
797 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
799 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
800 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
801 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
802 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
803 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
805 case BUILT_IN_ATOMIC_FETCH_OR_1
:
806 case BUILT_IN_ATOMIC_FETCH_OR_2
:
807 case BUILT_IN_ATOMIC_FETCH_OR_4
:
808 case BUILT_IN_ATOMIC_FETCH_OR_8
:
809 case BUILT_IN_ATOMIC_FETCH_OR_16
:
811 dest
= gimple_call_arg (call
, 0);
812 /* DEST represents the address of a memory location.
813 instrument_derefs wants the memory location, so lets
814 dereference the address DEST before handing it to
815 instrument_derefs. */
816 if (TREE_CODE (dest
) == ADDR_EXPR
)
817 dest
= TREE_OPERAND (dest
, 0);
818 else if (TREE_CODE (dest
) == SSA_NAME
|| TREE_CODE (dest
) == INTEGER_CST
)
819 dest
= build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (dest
)),
820 dest
, build_int_cst (TREE_TYPE (dest
), 0));
824 access_size
= int_size_in_bytes (TREE_TYPE (dest
));
828 /* The other builtins memory access are not instrumented in this
829 function because they either don't have any length parameter,
830 or their length parameter is just a limit. */
834 if (len
!= NULL_TREE
)
836 if (source0
!= NULL_TREE
)
838 src0
->start
= source0
;
839 src0
->access_size
= access_size
;
841 *src0_is_store
= false;
844 if (source1
!= NULL_TREE
)
846 src1
->start
= source1
;
847 src1
->access_size
= access_size
;
849 *src1_is_store
= false;
852 if (dest
!= NULL_TREE
)
855 dst
->access_size
= access_size
;
857 *dst_is_store
= true;
860 got_reference_p
= true;
865 dst
->access_size
= access_size
;
866 *dst_len
= NULL_TREE
;
867 *dst_is_store
= is_store
;
868 *dest_is_deref
= true;
869 got_reference_p
= true;
872 return got_reference_p
;
875 /* Return true iff a given gimple statement has been instrumented.
876 Note that the statement is "defined" by the memory references it
880 has_stmt_been_instrumented_p (gimple stmt
)
882 if (gimple_assign_single_p (stmt
))
886 asan_mem_ref_init (&r
, NULL
, 1);
888 if (get_mem_ref_of_assignment (as_a
<gassign
*> (stmt
), &r
,
890 return has_mem_ref_been_instrumented (&r
);
892 else if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
894 asan_mem_ref src0
, src1
, dest
;
895 asan_mem_ref_init (&src0
, NULL
, 1);
896 asan_mem_ref_init (&src1
, NULL
, 1);
897 asan_mem_ref_init (&dest
, NULL
, 1);
899 tree src0_len
= NULL_TREE
, src1_len
= NULL_TREE
, dest_len
= NULL_TREE
;
900 bool src0_is_store
= false, src1_is_store
= false,
901 dest_is_store
= false, dest_is_deref
= false, intercepted_p
= true;
902 if (get_mem_refs_of_builtin_call (as_a
<gcall
*> (stmt
),
903 &src0
, &src0_len
, &src0_is_store
,
904 &src1
, &src1_len
, &src1_is_store
,
905 &dest
, &dest_len
, &dest_is_store
,
906 &dest_is_deref
, &intercepted_p
))
908 if (src0
.start
!= NULL_TREE
909 && !has_mem_ref_been_instrumented (&src0
, src0_len
))
912 if (src1
.start
!= NULL_TREE
913 && !has_mem_ref_been_instrumented (&src1
, src1_len
))
916 if (dest
.start
!= NULL_TREE
917 && !has_mem_ref_been_instrumented (&dest
, dest_len
))
926 /* Insert a memory reference into the hash table. */
929 update_mem_ref_hash_table (tree ref
, HOST_WIDE_INT access_size
)
931 hash_table
<asan_mem_ref_hasher
> *ht
= get_mem_ref_hash_table ();
934 asan_mem_ref_init (&r
, ref
, access_size
);
936 asan_mem_ref
**slot
= ht
->find_slot (&r
, INSERT
);
937 if (*slot
== NULL
|| (*slot
)->access_size
< access_size
)
938 *slot
= asan_mem_ref_new (ref
, access_size
);
941 /* Initialize shadow_ptr_types array. */
944 asan_init_shadow_ptr_types (void)
946 asan_shadow_set
= new_alias_set ();
947 shadow_ptr_types
[0] = build_distinct_type_copy (signed_char_type_node
);
948 TYPE_ALIAS_SET (shadow_ptr_types
[0]) = asan_shadow_set
;
949 shadow_ptr_types
[0] = build_pointer_type (shadow_ptr_types
[0]);
950 shadow_ptr_types
[1] = build_distinct_type_copy (short_integer_type_node
);
951 TYPE_ALIAS_SET (shadow_ptr_types
[1]) = asan_shadow_set
;
952 shadow_ptr_types
[1] = build_pointer_type (shadow_ptr_types
[1]);
953 initialize_sanitizer_builtins ();
956 /* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */
959 asan_pp_string (pretty_printer
*pp
)
961 const char *buf
= pp_formatted_text (pp
);
962 size_t len
= strlen (buf
);
963 tree ret
= build_string (len
+ 1, buf
);
965 = build_array_type (TREE_TYPE (shadow_ptr_types
[0]),
966 build_index_type (size_int (len
)));
967 TREE_READONLY (ret
) = 1;
968 TREE_STATIC (ret
) = 1;
969 return build1 (ADDR_EXPR
, shadow_ptr_types
[0], ret
);
972 /* Return a CONST_INT representing 4 subsequent shadow memory bytes. */
975 asan_shadow_cst (unsigned char shadow_bytes
[4])
978 unsigned HOST_WIDE_INT val
= 0;
979 gcc_assert (WORDS_BIG_ENDIAN
== BYTES_BIG_ENDIAN
);
980 for (i
= 0; i
< 4; i
++)
981 val
|= (unsigned HOST_WIDE_INT
) shadow_bytes
[BYTES_BIG_ENDIAN
? 3 - i
: i
]
982 << (BITS_PER_UNIT
* i
);
983 return gen_int_mode (val
, SImode
);
986 /* Clear shadow memory at SHADOW_MEM, LEN bytes. Can't call a library call here
990 asan_clear_shadow (rtx shadow_mem
, HOST_WIDE_INT len
)
992 rtx_insn
*insn
, *insns
, *jump
;
993 rtx_code_label
*top_label
;
997 clear_storage (shadow_mem
, GEN_INT (len
), BLOCK_OP_NORMAL
);
998 insns
= get_insns ();
1000 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
1003 if (insn
== NULL_RTX
)
1009 gcc_assert ((len
& 3) == 0);
1010 top_label
= gen_label_rtx ();
1011 addr
= copy_to_mode_reg (Pmode
, XEXP (shadow_mem
, 0));
1012 shadow_mem
= adjust_automodify_address (shadow_mem
, SImode
, addr
, 0);
1013 end
= force_reg (Pmode
, plus_constant (Pmode
, addr
, len
));
1014 emit_label (top_label
);
1016 emit_move_insn (shadow_mem
, const0_rtx
);
1017 tmp
= expand_simple_binop (Pmode
, PLUS
, addr
, gen_int_mode (4, Pmode
), addr
,
1018 true, OPTAB_LIB_WIDEN
);
1020 emit_move_insn (addr
, tmp
);
1021 emit_cmp_and_jump_insns (addr
, end
, LT
, NULL_RTX
, Pmode
, true, top_label
);
1022 jump
= get_last_insn ();
1023 gcc_assert (JUMP_P (jump
));
1024 add_int_reg_note (jump
, REG_BR_PROB
, REG_BR_PROB_BASE
* 80 / 100);
1028 asan_function_start (void)
1030 section
*fnsec
= function_section (current_function_decl
);
1031 switch_to_section (fnsec
);
1032 ASM_OUTPUT_DEBUG_LABEL (asm_out_file
, "LASANPC",
1033 current_function_funcdef_no
);
1036 /* Insert code to protect stack vars. The prologue sequence should be emitted
1037 directly, epilogue sequence returned. BASE is the register holding the
1038 stack base, against which OFFSETS array offsets are relative to, OFFSETS
1039 array contains pairs of offsets in reverse order, always the end offset
1040 of some gap that needs protection followed by starting offset,
1041 and DECLS is an array of representative decls for each var partition.
1042 LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
1043 elements long (OFFSETS include gap before the first variable as well
1044 as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
1045 register which stack vars DECL_RTLs are based on. Either BASE should be
1046 assigned to PBASE, when not doing use after return protection, or
1047 corresponding address based on __asan_stack_malloc* return value. */
1050 asan_emit_stack_protection (rtx base
, rtx pbase
, unsigned int alignb
,
1051 HOST_WIDE_INT
*offsets
, tree
*decls
, int length
)
1053 rtx shadow_base
, shadow_mem
, ret
, mem
, orig_base
;
1054 rtx_code_label
*lab
;
1057 unsigned char shadow_bytes
[4];
1058 HOST_WIDE_INT base_offset
= offsets
[length
- 1];
1059 HOST_WIDE_INT base_align_bias
= 0, offset
, prev_offset
;
1060 HOST_WIDE_INT asan_frame_size
= offsets
[0] - base_offset
;
1061 HOST_WIDE_INT last_offset
, last_size
;
1063 unsigned char cur_shadow_byte
= ASAN_STACK_MAGIC_LEFT
;
1064 tree str_cst
, decl
, id
;
1065 int use_after_return_class
= -1;
1067 if (shadow_ptr_types
[0] == NULL_TREE
)
1068 asan_init_shadow_ptr_types ();
1070 /* First of all, prepare the description string. */
1071 pretty_printer asan_pp
;
1073 pp_decimal_int (&asan_pp
, length
/ 2 - 1);
1074 pp_space (&asan_pp
);
1075 for (l
= length
- 2; l
; l
-= 2)
1077 tree decl
= decls
[l
/ 2 - 1];
1078 pp_wide_integer (&asan_pp
, offsets
[l
] - base_offset
);
1079 pp_space (&asan_pp
);
1080 pp_wide_integer (&asan_pp
, offsets
[l
- 1] - offsets
[l
]);
1081 pp_space (&asan_pp
);
1082 if (DECL_P (decl
) && DECL_NAME (decl
))
1084 pp_decimal_int (&asan_pp
, IDENTIFIER_LENGTH (DECL_NAME (decl
)));
1085 pp_space (&asan_pp
);
1086 pp_tree_identifier (&asan_pp
, DECL_NAME (decl
));
1089 pp_string (&asan_pp
, "9 <unknown>");
1090 pp_space (&asan_pp
);
1092 str_cst
= asan_pp_string (&asan_pp
);
1094 /* Emit the prologue sequence. */
1095 if (asan_frame_size
> 32 && asan_frame_size
<= 65536 && pbase
1096 && ASAN_USE_AFTER_RETURN
)
1098 use_after_return_class
= floor_log2 (asan_frame_size
- 1) - 5;
1099 /* __asan_stack_malloc_N guarantees alignment
1100 N < 6 ? (64 << N) : 4096 bytes. */
1101 if (alignb
> (use_after_return_class
< 6
1102 ? (64U << use_after_return_class
) : 4096U))
1103 use_after_return_class
= -1;
1104 else if (alignb
> ASAN_RED_ZONE_SIZE
&& (asan_frame_size
& (alignb
- 1)))
1105 base_align_bias
= ((asan_frame_size
+ alignb
- 1)
1106 & ~(alignb
- HOST_WIDE_INT_1
)) - asan_frame_size
;
1108 /* Align base if target is STRICT_ALIGNMENT. */
1109 if (STRICT_ALIGNMENT
)
1110 base
= expand_binop (Pmode
, and_optab
, base
,
1111 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode
)
1112 << ASAN_SHADOW_SHIFT
)
1113 / BITS_PER_UNIT
), Pmode
), NULL_RTX
,
1116 if (use_after_return_class
== -1 && pbase
)
1117 emit_move_insn (pbase
, base
);
1119 base
= expand_binop (Pmode
, add_optab
, base
,
1120 gen_int_mode (base_offset
- base_align_bias
, Pmode
),
1121 NULL_RTX
, 1, OPTAB_DIRECT
);
1122 orig_base
= NULL_RTX
;
1123 if (use_after_return_class
!= -1)
1125 if (asan_detect_stack_use_after_return
== NULL_TREE
)
1127 id
= get_identifier ("__asan_option_detect_stack_use_after_return");
1128 decl
= build_decl (BUILTINS_LOCATION
, VAR_DECL
, id
,
1130 SET_DECL_ASSEMBLER_NAME (decl
, id
);
1131 TREE_ADDRESSABLE (decl
) = 1;
1132 DECL_ARTIFICIAL (decl
) = 1;
1133 DECL_IGNORED_P (decl
) = 1;
1134 DECL_EXTERNAL (decl
) = 1;
1135 TREE_STATIC (decl
) = 1;
1136 TREE_PUBLIC (decl
) = 1;
1137 TREE_USED (decl
) = 1;
1138 asan_detect_stack_use_after_return
= decl
;
1140 orig_base
= gen_reg_rtx (Pmode
);
1141 emit_move_insn (orig_base
, base
);
1142 ret
= expand_normal (asan_detect_stack_use_after_return
);
1143 lab
= gen_label_rtx ();
1144 int very_likely
= REG_BR_PROB_BASE
- (REG_BR_PROB_BASE
/ 2000 - 1);
1145 emit_cmp_and_jump_insns (ret
, const0_rtx
, EQ
, NULL_RTX
,
1146 VOIDmode
, 0, lab
, very_likely
);
1147 snprintf (buf
, sizeof buf
, "__asan_stack_malloc_%d",
1148 use_after_return_class
);
1149 ret
= init_one_libfunc (buf
);
1150 rtx addr
= convert_memory_address (ptr_mode
, base
);
1151 ret
= emit_library_call_value (ret
, NULL_RTX
, LCT_NORMAL
, ptr_mode
, 2,
1152 GEN_INT (asan_frame_size
1154 TYPE_MODE (pointer_sized_int_node
),
1156 ret
= convert_memory_address (Pmode
, ret
);
1157 emit_move_insn (base
, ret
);
1159 emit_move_insn (pbase
, expand_binop (Pmode
, add_optab
, base
,
1160 gen_int_mode (base_align_bias
1161 - base_offset
, Pmode
),
1162 NULL_RTX
, 1, OPTAB_DIRECT
));
1164 mem
= gen_rtx_MEM (ptr_mode
, base
);
1165 mem
= adjust_address (mem
, VOIDmode
, base_align_bias
);
1166 emit_move_insn (mem
, gen_int_mode (ASAN_STACK_FRAME_MAGIC
, ptr_mode
));
1167 mem
= adjust_address (mem
, VOIDmode
, GET_MODE_SIZE (ptr_mode
));
1168 emit_move_insn (mem
, expand_normal (str_cst
));
1169 mem
= adjust_address (mem
, VOIDmode
, GET_MODE_SIZE (ptr_mode
));
1170 ASM_GENERATE_INTERNAL_LABEL (buf
, "LASANPC", current_function_funcdef_no
);
1171 id
= get_identifier (buf
);
1172 decl
= build_decl (DECL_SOURCE_LOCATION (current_function_decl
),
1173 VAR_DECL
, id
, char_type_node
);
1174 SET_DECL_ASSEMBLER_NAME (decl
, id
);
1175 TREE_ADDRESSABLE (decl
) = 1;
1176 TREE_READONLY (decl
) = 1;
1177 DECL_ARTIFICIAL (decl
) = 1;
1178 DECL_IGNORED_P (decl
) = 1;
1179 TREE_STATIC (decl
) = 1;
1180 TREE_PUBLIC (decl
) = 0;
1181 TREE_USED (decl
) = 1;
1182 DECL_INITIAL (decl
) = decl
;
1183 TREE_ASM_WRITTEN (decl
) = 1;
1184 TREE_ASM_WRITTEN (id
) = 1;
1185 emit_move_insn (mem
, expand_normal (build_fold_addr_expr (decl
)));
1186 shadow_base
= expand_binop (Pmode
, lshr_optab
, base
,
1187 GEN_INT (ASAN_SHADOW_SHIFT
),
1188 NULL_RTX
, 1, OPTAB_DIRECT
);
1190 = plus_constant (Pmode
, shadow_base
,
1191 asan_shadow_offset ()
1192 + (base_align_bias
>> ASAN_SHADOW_SHIFT
));
1193 gcc_assert (asan_shadow_set
!= -1
1194 && (ASAN_RED_ZONE_SIZE
>> ASAN_SHADOW_SHIFT
) == 4);
1195 shadow_mem
= gen_rtx_MEM (SImode
, shadow_base
);
1196 set_mem_alias_set (shadow_mem
, asan_shadow_set
);
1197 if (STRICT_ALIGNMENT
)
1198 set_mem_align (shadow_mem
, (GET_MODE_ALIGNMENT (SImode
)));
1199 prev_offset
= base_offset
;
1200 for (l
= length
; l
; l
-= 2)
1203 cur_shadow_byte
= ASAN_STACK_MAGIC_RIGHT
;
1204 offset
= offsets
[l
- 1];
1205 if ((offset
- base_offset
) & (ASAN_RED_ZONE_SIZE
- 1))
1209 = base_offset
+ ((offset
- base_offset
)
1210 & ~(ASAN_RED_ZONE_SIZE
- HOST_WIDE_INT_1
));
1211 shadow_mem
= adjust_address (shadow_mem
, VOIDmode
,
1212 (aoff
- prev_offset
)
1213 >> ASAN_SHADOW_SHIFT
);
1215 for (i
= 0; i
< 4; i
++, aoff
+= (1 << ASAN_SHADOW_SHIFT
))
1218 if (aoff
< offset
- (1 << ASAN_SHADOW_SHIFT
) + 1)
1219 shadow_bytes
[i
] = 0;
1221 shadow_bytes
[i
] = offset
- aoff
;
1224 shadow_bytes
[i
] = ASAN_STACK_MAGIC_PARTIAL
;
1225 emit_move_insn (shadow_mem
, asan_shadow_cst (shadow_bytes
));
1228 while (offset
<= offsets
[l
- 2] - ASAN_RED_ZONE_SIZE
)
1230 shadow_mem
= adjust_address (shadow_mem
, VOIDmode
,
1231 (offset
- prev_offset
)
1232 >> ASAN_SHADOW_SHIFT
);
1233 prev_offset
= offset
;
1234 memset (shadow_bytes
, cur_shadow_byte
, 4);
1235 emit_move_insn (shadow_mem
, asan_shadow_cst (shadow_bytes
));
1236 offset
+= ASAN_RED_ZONE_SIZE
;
1238 cur_shadow_byte
= ASAN_STACK_MAGIC_MIDDLE
;
1240 do_pending_stack_adjust ();
1242 /* Construct epilogue sequence. */
1246 if (use_after_return_class
!= -1)
1248 rtx_code_label
*lab2
= gen_label_rtx ();
1249 char c
= (char) ASAN_STACK_MAGIC_USE_AFTER_RET
;
1250 int very_likely
= REG_BR_PROB_BASE
- (REG_BR_PROB_BASE
/ 2000 - 1);
1251 emit_cmp_and_jump_insns (orig_base
, base
, EQ
, NULL_RTX
,
1252 VOIDmode
, 0, lab2
, very_likely
);
1253 shadow_mem
= gen_rtx_MEM (BLKmode
, shadow_base
);
1254 set_mem_alias_set (shadow_mem
, asan_shadow_set
);
1255 mem
= gen_rtx_MEM (ptr_mode
, base
);
1256 mem
= adjust_address (mem
, VOIDmode
, base_align_bias
);
1257 emit_move_insn (mem
, gen_int_mode (ASAN_STACK_RETIRED_MAGIC
, ptr_mode
));
1258 unsigned HOST_WIDE_INT sz
= asan_frame_size
>> ASAN_SHADOW_SHIFT
;
1259 if (use_after_return_class
< 5
1260 && can_store_by_pieces (sz
, builtin_memset_read_str
, &c
,
1261 BITS_PER_UNIT
, true))
1262 store_by_pieces (shadow_mem
, sz
, builtin_memset_read_str
, &c
,
1263 BITS_PER_UNIT
, true, 0);
1264 else if (use_after_return_class
>= 5
1265 || !set_storage_via_setmem (shadow_mem
,
1267 gen_int_mode (c
, QImode
),
1268 BITS_PER_UNIT
, BITS_PER_UNIT
,
1271 snprintf (buf
, sizeof buf
, "__asan_stack_free_%d",
1272 use_after_return_class
);
1273 ret
= init_one_libfunc (buf
);
1274 rtx addr
= convert_memory_address (ptr_mode
, base
);
1275 rtx orig_addr
= convert_memory_address (ptr_mode
, orig_base
);
1276 emit_library_call (ret
, LCT_NORMAL
, ptr_mode
, 3, addr
, ptr_mode
,
1277 GEN_INT (asan_frame_size
+ base_align_bias
),
1278 TYPE_MODE (pointer_sized_int_node
),
1279 orig_addr
, ptr_mode
);
1281 lab
= gen_label_rtx ();
1286 shadow_mem
= gen_rtx_MEM (BLKmode
, shadow_base
);
1287 set_mem_alias_set (shadow_mem
, asan_shadow_set
);
1289 if (STRICT_ALIGNMENT
)
1290 set_mem_align (shadow_mem
, (GET_MODE_ALIGNMENT (SImode
)));
1292 prev_offset
= base_offset
;
1293 last_offset
= base_offset
;
1295 for (l
= length
; l
; l
-= 2)
1297 offset
= base_offset
+ ((offsets
[l
- 1] - base_offset
)
1298 & ~(ASAN_RED_ZONE_SIZE
- HOST_WIDE_INT_1
));
1299 if (last_offset
+ last_size
!= offset
)
1301 shadow_mem
= adjust_address (shadow_mem
, VOIDmode
,
1302 (last_offset
- prev_offset
)
1303 >> ASAN_SHADOW_SHIFT
);
1304 prev_offset
= last_offset
;
1305 asan_clear_shadow (shadow_mem
, last_size
>> ASAN_SHADOW_SHIFT
);
1306 last_offset
= offset
;
1309 last_size
+= base_offset
+ ((offsets
[l
- 2] - base_offset
)
1310 & ~(ASAN_RED_ZONE_SIZE
- HOST_WIDE_INT_1
))
1315 shadow_mem
= adjust_address (shadow_mem
, VOIDmode
,
1316 (last_offset
- prev_offset
)
1317 >> ASAN_SHADOW_SHIFT
);
1318 asan_clear_shadow (shadow_mem
, last_size
>> ASAN_SHADOW_SHIFT
);
1321 do_pending_stack_adjust ();
1325 insns
= get_insns ();
1330 /* Return true if DECL, a global var, might be overridden and needs
1331 therefore a local alias. */
1334 asan_needs_local_alias (tree decl
)
1336 return DECL_WEAK (decl
) || !targetm
.binds_local_p (decl
);
1339 /* Return true if DECL is a VAR_DECL that should be protected
1340 by Address Sanitizer, by appending a red zone with protected
1341 shadow memory after it and aligning it to at least
1342 ASAN_RED_ZONE_SIZE bytes. */
1345 asan_protect_global (tree decl
)
1352 if (TREE_CODE (decl
) == STRING_CST
)
1354 /* Instrument all STRING_CSTs except those created
1355 by asan_pp_string here. */
/* asan_pp_string builds its strings with the element type of
   shadow_ptr_types[0]; that type match is how its strings are
   recognized and excluded here.  */
1356 if (shadow_ptr_types
[0] != NULL_TREE
1357 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1358 && TREE_TYPE (TREE_TYPE (decl
)) == TREE_TYPE (shadow_ptr_types
[0]))
/* Reject declarations that cannot safely carry a red zone: only
   plain, statically allocated, non-preemptible VAR_DECLs with a
   known, constant, reasonably aligned size qualify.  */
1362 if (TREE_CODE (decl
) != VAR_DECL
1363 /* TLS vars aren't statically protectable. */
1364 || DECL_THREAD_LOCAL_P (decl
)
1365 /* Externs will be protected elsewhere. */
1366 || DECL_EXTERNAL (decl
)
1367 || !DECL_RTL_SET_P (decl
)
1368 /* Comdat vars pose an ABI problem, we can't know if
1369 the var that is selected by the linker will have
1371 || DECL_ONE_ONLY (decl
)
1372 /* Similarly for common vars. People can use -fno-common.
1373 Note: Linux kernel is built with -fno-common, so we do instrument
1374 globals there even if it is C. */
1375 || (DECL_COMMON (decl
) && TREE_PUBLIC (decl
))
1376 /* Don't protect if using user section, often vars placed
1377 into user section from multiple TUs are then assumed
1378 to be an array of such vars, putting padding in there
1379 breaks this assumption. */
1380 || (DECL_SECTION_NAME (decl
) != NULL
1381 && !symtab_node::get (decl
)->implicit_section
1382 && !section_sanitized_p (DECL_SECTION_NAME (decl
)))
1383 || DECL_SIZE (decl
) == 0
1384 || ASAN_RED_ZONE_SIZE
* BITS_PER_UNIT
> MAX_OFILE_ALIGNMENT
1385 || !valid_constant_size_p (DECL_SIZE_UNIT (decl
))
1386 || DECL_ALIGN_UNIT (decl
) > 2 * ASAN_RED_ZONE_SIZE
1387 || TREE_TYPE (decl
) == ubsan_get_source_location_type ())
/* Inspect the variable's RTL symbol: constant-pool entries and
   weakrefs are screened out next — presumably they cannot carry
   red zones; confirm against upstream asan.c.  */
1390 rtl
= DECL_RTL (decl
);
1391 if (!MEM_P (rtl
) || GET_CODE (XEXP (rtl
, 0)) != SYMBOL_REF
)
1393 symbol
= XEXP (rtl
, 0);
1395 if (CONSTANT_POOL_ADDRESS_P (symbol
)
1396 || TREE_CONSTANT_POOL_ADDRESS_P (symbol
))
1399 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl
)))
/* NOTE(review): without ASM_OUTPUT_DEF the local alias that
   asan_needs_local_alias asks for presumably cannot be emitted,
   so such globals are skipped — confirm upstream.  */
1402 #ifndef ASM_OUTPUT_DEF
1403 if (asan_needs_local_alias (decl
))
1410 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1411 IS_STORE is either 1 (for a store) or 0 (for a load). */
1414 report_error_func (bool is_store
, bool recover_p
, HOST_WIDE_INT size_in_bytes
,
/* Lookup table indexed as report[recover_p][is_store][size slot];
   slot 5 holds the variable-length _N variant, and the recover_p
   plane selects the _NOABORT (recoverable) builtins.  */
1417 static enum built_in_function report
[2][2][6]
1418 = { { { BUILT_IN_ASAN_REPORT_LOAD1
, BUILT_IN_ASAN_REPORT_LOAD2
,
1419 BUILT_IN_ASAN_REPORT_LOAD4
, BUILT_IN_ASAN_REPORT_LOAD8
,
1420 BUILT_IN_ASAN_REPORT_LOAD16
, BUILT_IN_ASAN_REPORT_LOAD_N
},
1421 { BUILT_IN_ASAN_REPORT_STORE1
, BUILT_IN_ASAN_REPORT_STORE2
,
1422 BUILT_IN_ASAN_REPORT_STORE4
, BUILT_IN_ASAN_REPORT_STORE8
,
1423 BUILT_IN_ASAN_REPORT_STORE16
, BUILT_IN_ASAN_REPORT_STORE_N
} },
1424 { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT
,
1425 BUILT_IN_ASAN_REPORT_LOAD2_NOABORT
,
1426 BUILT_IN_ASAN_REPORT_LOAD4_NOABORT
,
1427 BUILT_IN_ASAN_REPORT_LOAD8_NOABORT
,
1428 BUILT_IN_ASAN_REPORT_LOAD16_NOABORT
,
1429 BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT
},
1430 { BUILT_IN_ASAN_REPORT_STORE1_NOABORT
,
1431 BUILT_IN_ASAN_REPORT_STORE2_NOABORT
,
1432 BUILT_IN_ASAN_REPORT_STORE4_NOABORT
,
1433 BUILT_IN_ASAN_REPORT_STORE8_NOABORT
,
1434 BUILT_IN_ASAN_REPORT_STORE16_NOABORT
,
1435 BUILT_IN_ASAN_REPORT_STORE_N_NOABORT
} } };
/* SIZE_IN_BYTES == -1 means the access length is not a compile-time
   constant: use the __asan_report_*_n entry (slot 5).  */
1436 if (size_in_bytes
== -1)
1439 return builtin_decl_implicit (report
[recover_p
][is_store
][5]);
/* Otherwise SIZE_IN_BYTES is expected to be a power of two in
   [1,16] (presumably guaranteed by callers — confirm); its log2
   selects one of the sized report routines (slots 0-4).  */
1442 int size_log2
= exact_log2 (size_in_bytes
);
1443 return builtin_decl_implicit (report
[recover_p
][is_store
][size_log2
]);
1446 /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1447 IS_STORE is either 1 (for a store) or 0 (for a load). */
1450 check_func (bool is_store
, bool recover_p
, HOST_WIDE_INT size_in_bytes
,
/* Same layout as the report[] table in report_error_func, but for
   the checking (non-reporting) entry points:
   check[recover_p][is_store][size slot], slot 5 = _N variant.  */
1453 static enum built_in_function check
[2][2][6]
1454 = { { { BUILT_IN_ASAN_LOAD1
, BUILT_IN_ASAN_LOAD2
,
1455 BUILT_IN_ASAN_LOAD4
, BUILT_IN_ASAN_LOAD8
,
1456 BUILT_IN_ASAN_LOAD16
, BUILT_IN_ASAN_LOADN
},
1457 { BUILT_IN_ASAN_STORE1
, BUILT_IN_ASAN_STORE2
,
1458 BUILT_IN_ASAN_STORE4
, BUILT_IN_ASAN_STORE8
,
1459 BUILT_IN_ASAN_STORE16
, BUILT_IN_ASAN_STOREN
} },
1460 { { BUILT_IN_ASAN_LOAD1_NOABORT
,
1461 BUILT_IN_ASAN_LOAD2_NOABORT
,
1462 BUILT_IN_ASAN_LOAD4_NOABORT
,
1463 BUILT_IN_ASAN_LOAD8_NOABORT
,
1464 BUILT_IN_ASAN_LOAD16_NOABORT
,
1465 BUILT_IN_ASAN_LOADN_NOABORT
},
1466 { BUILT_IN_ASAN_STORE1_NOABORT
,
1467 BUILT_IN_ASAN_STORE2_NOABORT
,
1468 BUILT_IN_ASAN_STORE4_NOABORT
,
1469 BUILT_IN_ASAN_STORE8_NOABORT
,
1470 BUILT_IN_ASAN_STORE16_NOABORT
,
1471 BUILT_IN_ASAN_STOREN_NOABORT
} } };
/* -1 == non-constant length: pick the variable-size _n builtin.  */
1472 if (size_in_bytes
== -1)
1475 return builtin_decl_implicit (check
[recover_p
][is_store
][5]);
/* Otherwise use log2 of the (power-of-two) size as the slot.  */
1478 int size_log2
= exact_log2 (size_in_bytes
);
1479 return builtin_decl_implicit (check
[recover_p
][is_store
][size_log2
]);
1482 /* Split the current basic block and create a condition statement
1483 insertion point right before or after the statement pointed to by
1484 ITER. Return an iterator to the point at which the caller might
1485 safely insert the condition statement.
1487 THEN_BLOCK must be set to the address of an uninitialized instance
1488 of basic_block. The function will then set *THEN_BLOCK to the
1489 'then block' of the condition statement to be inserted by the
1492 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1493 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1495 Similarly, the function will set *FALLTRHOUGH_BLOCK to the 'else
1496 block' of the condition statement to be inserted by the caller.
1498 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1499 statements starting from *ITER, and *THEN_BLOCK is a new empty
1502 *ITER is adjusted to point to always point to the first statement
1503 of the basic block * FALLTHROUGH_BLOCK. That statement is the
1504 same as what ITER was pointing to prior to calling this function,
1505 if BEFORE_P is true; otherwise, it is its following statement. */
1507 gimple_stmt_iterator
1508 create_cond_insert_point (gimple_stmt_iterator
*iter
,
1510 bool then_more_likely_p
,
1511 bool create_then_fallthru_edge
,
1512 basic_block
*then_block
,
1513 basic_block
*fallthrough_block
)
1515 gimple_stmt_iterator gsi
= *iter
;
/* When inserting before a real statement, GSI is presumably stepped
   back first so the split happens ahead of it (the adjustment line
   is elided in this view — confirm upstream).  */
1517 if (!gsi_end_p (gsi
) && before_p
)
/* Split the current block at GSI; the split edge E then runs from
   the condition part to the statements that follow it.  */
1520 basic_block cur_bb
= gsi_bb (*iter
);
1522 edge e
= split_block (cur_bb
, gsi_stmt (gsi
));
1524 /* Get a hold on the 'condition block', the 'then block' and the
1526 basic_block cond_bb
= e
->src
;
1527 basic_block fallthru_bb
= e
->dest
;
1528 basic_block then_bb
= create_empty_bb (cond_bb
);
/* Keep the loop structure consistent for the new block; a full
   fixup is requested since the CFG changed.  */
1531 add_bb_to_loop (then_bb
, cond_bb
->loop_father
);
1532 loops_state_set (LOOPS_NEED_FIXUP
);
1535 /* Set up the newly created 'then block'. */
1536 e
= make_edge (cond_bb
, then_bb
, EDGE_TRUE_VALUE
);
/* Probability bookkeeping: the then-edge and the fallthrough edge
   split PROB_ALWAYS between them according to THEN_MORE_LIKELY_P.  */
1537 int fallthrough_probability
1538 = then_more_likely_p
1539 ? PROB_VERY_UNLIKELY
1540 : PROB_ALWAYS
- PROB_VERY_UNLIKELY
;
1541 e
->probability
= PROB_ALWAYS
- fallthrough_probability
;
1542 if (create_then_fallthru_edge
)
1543 make_single_succ_edge (then_bb
, fallthru_bb
, EDGE_FALLTHRU
);
1545 /* Set up the fallthrough basic block. */
1546 e
= find_edge (cond_bb
, fallthru_bb
);
1547 e
->flags
= EDGE_FALSE_VALUE
;
1548 e
->count
= cond_bb
->count
;
1549 e
->probability
= fallthrough_probability
;
1551 /* Update dominance info for the newly created then_bb; note that
1552 fallthru_bb's dominance info has already been updated by
1554 if (dom_info_available_p (CDI_DOMINATORS
))
1555 set_immediate_dominator (CDI_DOMINATORS
, then_bb
, cond_bb
);
/* Publish the new blocks and reposition *ITER at the head of the
   fallthrough block, as documented in the header comment.  */
1557 *then_block
= then_bb
;
1558 *fallthrough_block
= fallthru_bb
;
1559 *iter
= gsi_start_bb (fallthru_bb
);
1561 return gsi_last_bb (cond_bb
);
1564 /* Insert an if condition followed by a 'then block' right before the
1565 statement pointed to by ITER. The fallthrough block -- which is the
1566 else block of the condition as well as the destination of the
1567 outcoming edge of the 'then block' -- starts with the statement
1570 COND is the condition of the if.
1572 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1573 'then block' is higher than the probability of the edge to the
1576 Upon completion of the function, *THEN_BB is set to the newly
1577 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1580 *ITER is adjusted to still point to the same statement it was
1581 pointing to initially. */
1584 insert_if_then_before_iter (gcond
*cond
,
1585 gimple_stmt_iterator
*iter
,
1586 bool then_more_likely_p
,
1587 basic_block
*then_bb
,
1588 basic_block
*fallthrough_bb
)
/* Delegate the CFG surgery to create_cond_insert_point, then drop
   COND at the insertion point it returns (end of the condition
   block).  */
1590 gimple_stmt_iterator cond_insert_point
=
1591 create_cond_insert_point (iter
,
1594 /*create_then_fallthru_edge=*/true,
1597 gsi_insert_after (&cond_insert_point
, cond
, GSI_NEW_STMT
);
1601 (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset (). */
1604 build_shadow_mem_access (gimple_stmt_iterator
*gsi
, location_t location
,
1605 tree base_addr
, tree shadow_ptr_type
)
1607 tree t
, uintptr_type
= TREE_TYPE (base_addr
);
1608 tree shadow_type
= TREE_TYPE (shadow_ptr_type
);
/* Step 1: shadow index = base_addr >> ASAN_SHADOW_SHIFT.  */
1611 t
= build_int_cst (uintptr_type
, ASAN_SHADOW_SHIFT
);
1612 g
= gimple_build_assign (make_ssa_name (uintptr_type
), RSHIFT_EXPR
,
1614 gimple_set_location (g
, location
);
1615 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
/* Step 2: add the constant shadow-memory offset.  */
1617 t
= build_int_cst (uintptr_type
, asan_shadow_offset ());
1618 g
= gimple_build_assign (make_ssa_name (uintptr_type
), PLUS_EXPR
,
1619 gimple_assign_lhs (g
), t
);
1620 gimple_set_location (g
, location
);
1621 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
/* Step 3: convert the integer address to SHADOW_PTR_TYPE.  */
1623 g
= gimple_build_assign (make_ssa_name (shadow_ptr_type
), NOP_EXPR
,
1624 gimple_assign_lhs (g
));
1625 gimple_set_location (g
, location
);
1626 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
/* Step 4: load the shadow value through a MEM_REF and return the
   SSA name holding it.  */
1628 t
= build2 (MEM_REF
, shadow_type
, gimple_assign_lhs (g
),
1629 build_int_cst (shadow_ptr_type
, 0));
1630 g
= gimple_build_assign (make_ssa_name (shadow_type
), MEM_REF
, t
);
1631 gimple_set_location (g
, location
);
1632 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
1633 return gimple_assign_lhs (g
);
1636 /* BASE can already be an SSA_NAME; in that case, do not create a
1637 new SSA_NAME for it. */
1640 maybe_create_ssa_name (location_t loc
, tree base
, gimple_stmt_iterator
*iter
,
1643 if (TREE_CODE (base
) == SSA_NAME
)
/* Otherwise copy BASE into a fresh SSA name of the same type so
   the instrumentation can reference it.  */
1645 gimple g
= gimple_build_assign (make_ssa_name (TREE_TYPE (base
)),
1646 TREE_CODE (base
), base
);
1647 gimple_set_location (g
, loc
);
/* NOTE(review): the control flow selecting between these two
   insertions is elided in this view — presumably a BEFORE_P-style
   flag chooses insert-before vs insert-after; confirm upstream.  */
1649 gsi_insert_before (iter
, g
, GSI_SAME_STMT
);
1651 gsi_insert_after (iter
, g
, GSI_NEW_STMT
);
1652 return gimple_assign_lhs (g
);
1655 /* LEN can already have necessary size and precision;
1656 in that case, do not create a new variable. */
1659 maybe_cast_to_ptrmode (location_t loc
, tree len
, gimple_stmt_iterator
*iter
,
1662 if (ptrofftype_p (len
))
/* Otherwise widen/convert LEN into a pointer-sized integer SSA
   name so it can be passed to the check machinery.  */
1664 gimple g
= gimple_build_assign (make_ssa_name (pointer_sized_int_node
),
1666 gimple_set_location (g
, loc
);
/* NOTE(review): as in maybe_create_ssa_name, the branch choosing
   before/after insertion is elided here — confirm upstream.  */
1668 gsi_insert_before (iter
, g
, GSI_SAME_STMT
);
1670 gsi_insert_after (iter
, g
, GSI_NEW_STMT
);
1671 return gimple_assign_lhs (g
);
1674 /* Instrument the memory access instruction BASE. Insert new
1675 statements before or after ITER.
1677 Note that the memory access represented by BASE can be either an
1678 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1679 location. IS_STORE is TRUE for a store, FALSE for a load.
1680 BEFORE_P is TRUE for inserting the instrumentation code before
1681 ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
1682 for a scalar memory access and FALSE for memory region access.
1683 NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
1684 length. ALIGN tells alignment of accessed memory object.
1686 START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
1687 memory region have already been instrumented.
1689 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1690 statement it was pointing to prior to calling this function,
1691 otherwise, it points to the statement logically following it. */
1694 build_check_stmt (location_t loc
, tree base
, tree len
,
1695 HOST_WIDE_INT size_in_bytes
, gimple_stmt_iterator
*iter
,
1696 bool is_non_zero_len
, bool before_p
, bool is_store
,
1697 bool is_scalar_access
, unsigned int align
= 0)
1699 gimple_stmt_iterator gsi
= *iter
;
/* A positive compile-time size implies a non-zero-length access.  */
1702 gcc_assert (!(size_in_bytes
> 0 && !is_non_zero_len
));
/* Normalize BASE and LEN: unshare the trees and force them into
   SSA / pointer-sized-integer form as needed.  */
1706 base
= unshare_expr (base
);
1707 base
= maybe_create_ssa_name (loc
, base
, &gsi
, before_p
);
1711 len
= unshare_expr (len
);
1712 len
= maybe_cast_to_ptrmode (loc
, len
, iter
, before_p
);
1716 gcc_assert (size_in_bytes
!= -1);
1717 len
= build_int_cst (pointer_sized_int_node
, size_in_bytes
);
/* Demote to a region (non-scalar) check when a single sized shadow
   check cannot cover the access: non-power-of-two size, size > 16,
   or insufficient alignment for the size.  */
1720 if (size_in_bytes
> 1)
1722 if ((size_in_bytes
& (size_in_bytes
- 1)) != 0
1723 || size_in_bytes
> 16)
1724 is_scalar_access
= false;
1725 else if (align
&& align
< size_in_bytes
* BITS_PER_UNIT
)
1727 /* On non-strict alignment targets, if
1728 16-byte access is just 8-byte aligned,
1729 this will result in misaligned shadow
1730 memory 2 byte load, but otherwise can
1731 be handled using one read. */
1732 if (size_in_bytes
!= 16
1734 || align
< 8 * BITS_PER_UNIT
)
1735 is_scalar_access
= false;
/* Encode the check parameters as flag bits for the IFN_ASAN_CHECK
   internal call; the call is presumably expanded into real checks
   later in the pipeline (sanopt) — confirm upstream.  */
1739 HOST_WIDE_INT flags
= 0;
1741 flags
|= ASAN_CHECK_STORE
;
1742 if (is_non_zero_len
)
1743 flags
|= ASAN_CHECK_NON_ZERO_LEN
;
1744 if (is_scalar_access
)
1745 flags
|= ASAN_CHECK_SCALAR_ACCESS
;
1747 g
= gimple_build_call_internal (IFN_ASAN_CHECK
, 4,
1748 build_int_cst (integer_type_node
, flags
),
1750 build_int_cst (integer_type_node
,
1751 align
/ BITS_PER_UNIT
));
1752 gimple_set_location (g
, loc
);
/* NOTE(review): the branch on BEFORE_P selecting between these two
   insertions is elided in this view — confirm upstream.  */
1754 gsi_insert_before (&gsi
, g
, GSI_SAME_STMT
);
1757 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
1763 /* If T represents a memory access, add instrumentation code before ITER.
1764 LOCATION is source code location.
1765 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
1768 instrument_derefs (gimple_stmt_iterator
*iter
, tree t
,
1769 location_t location
, bool is_store
)
/* Honor the knobs that disable write or read instrumentation.  */
1771 if (is_store
&& !ASAN_INSTRUMENT_WRITES
)
1773 if (!is_store
&& !ASAN_INSTRUMENT_READS
)
1777 HOST_WIDE_INT size_in_bytes
;
1779 type
= TREE_TYPE (t
);
1780 switch (TREE_CODE (t
))
/* Only accesses with a positive, known byte size are checked.  */
1794 size_in_bytes
= int_size_in_bytes (type
);
1795 if (size_in_bytes
<= 0)
1798 HOST_WIDE_INT bitsize
, bitpos
;
1801 int volatilep
= 0, unsignedp
= 0;
1802 tree inner
= get_inner_reference (t
, &bitsize
, &bitpos
, &offset
,
1803 &mode
, &unsignedp
, &volatilep
, false);
/* A bit-field access is instrumented via its representative field,
   which covers whole bytes, by recursing on a COMPONENT_REF of the
   representative.  */
1805 if (TREE_CODE (t
) == COMPONENT_REF
1806 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t
, 1)) != NULL_TREE
)
1808 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t
, 1));
1809 instrument_derefs (iter
, build3 (COMPONENT_REF
, TREE_TYPE (repr
),
1810 TREE_OPERAND (t
, 0), repr
,
1811 NULL_TREE
), location
, is_store
);
/* Skip accesses that are not byte-aligned or whose bit size does not
   match the type size.  */
1815 if (bitpos
% BITS_PER_UNIT
1816 || bitsize
!= size_in_bytes
* BITS_PER_UNIT
)
/* Accesses provably inside a VAR_DECL of known size may be skipped
   entirely when the variable is known to be always accessible
   (see the per-case comments below).  */
1819 if (TREE_CODE (inner
) == VAR_DECL
1820 && offset
== NULL_TREE
1822 && DECL_SIZE (inner
)
1823 && tree_fits_shwi_p (DECL_SIZE (inner
))
1824 && bitpos
+ bitsize
<= tree_to_shwi (DECL_SIZE (inner
)))
1826 if (DECL_THREAD_LOCAL_P (inner
))
1828 if (!ASAN_GLOBALS
&& is_global_var (inner
))
1830 if (!TREE_STATIC (inner
))
1832 /* Automatic vars in the current function will be always
1834 if (decl_function_context (inner
) == current_function_decl
)
1837 /* Always instrument external vars, they might be dynamically
1839 else if (!DECL_EXTERNAL (inner
))
1841 /* For static vars if they are known not to be dynamically
1842 initialized, they will be always accessible. */
1843 varpool_node
*vnode
= varpool_node::get (inner
);
1844 if (vnode
&& !vnode
->dynamically_initialized
)
/* Emit the actual check once per extended basic block; the mem-ref
   hash table records what has already been instrumented.  */
1849 base
= build_fold_addr_expr (t
);
1850 if (!has_mem_ref_been_instrumented (base
, size_in_bytes
))
1852 unsigned int align
= get_object_alignment (t
);
1853 build_check_stmt (location
, base
, NULL_TREE
, size_in_bytes
, iter
,
1854 /*is_non_zero_len*/size_in_bytes
> 0, /*before_p=*/true,
1855 is_store
, /*is_scalar_access*/true, align
);
1856 update_mem_ref_hash_table (base
, size_in_bytes
);
1857 update_mem_ref_hash_table (t
, size_in_bytes
);
1862 /* Insert a memory reference into the hash table if access length
1863 can be determined in compile time. */
1866 maybe_update_mem_ref_hash_table (tree base
, tree len
)
/* Only pointer bases with integral lengths are recordable.  */
1868 if (!POINTER_TYPE_P (TREE_TYPE (base
))
1869 || !INTEGRAL_TYPE_P (TREE_TYPE (len
)))
/* -1 flags a length that is not a compile-time constant fitting a
   signed HOST_WIDE_INT; such references are not recorded.  */
1872 HOST_WIDE_INT size_in_bytes
= tree_fits_shwi_p (len
) ? tree_to_shwi (len
) : -1;
1874 if (size_in_bytes
!= -1)
1875 update_mem_ref_hash_table (base
, size_in_bytes
);
1878 /* Instrument an access to a contiguous memory region that starts at
1879 the address pointed to by BASE, over a length of LEN (expressed in
1880 the sizeof (*BASE) bytes). ITER points to the instruction before
1881 which the instrumentation instructions must be inserted. LOCATION
1882 is the source location that the instrumentation instructions must
1883 have. If IS_STORE is true, then the memory access is a store;
1884 otherwise, it's a load. */
1887 instrument_mem_region_access (tree base
, tree len
,
1888 gimple_stmt_iterator
*iter
,
1889 location_t location
, bool is_store
)
/* Nothing to do for non-pointer bases, non-integral or zero
   lengths.  */
1891 if (!POINTER_TYPE_P (TREE_TYPE (base
))
1892 || !INTEGRAL_TYPE_P (TREE_TYPE (len
))
1893 || integer_zerop (len
))
1896 HOST_WIDE_INT size_in_bytes
= tree_fits_shwi_p (len
) ? tree_to_shwi (len
) : -1;
/* Emit a region check unless this exact (base, size) pair was
   already instrumented in the current extended basic block.  */
1898 if ((size_in_bytes
== -1)
1899 || !has_mem_ref_been_instrumented (base
, size_in_bytes
))
1901 build_check_stmt (location
, base
, len
, size_in_bytes
, iter
,
1902 /*is_non_zero_len*/size_in_bytes
> 0, /*before_p*/true,
1903 is_store
, /*is_scalar_access*/false, /*align*/0);
1906 maybe_update_mem_ref_hash_table (base
, len
);
/* Re-fetch *ITER: the insertions above may have invalidated the
   iterator, so rebuild it from the statement it points at.  */
1907 *iter
= gsi_for_stmt (gsi_stmt (*iter
));
1910 /* Instrument the call to a built-in memory access function that is
1911 pointed to by the iterator ITER.
1913 Upon completion, return TRUE iff *ITER has been advanced to the
1914 statement following the one it was originally pointing to. */
1917 instrument_builtin_call (gimple_stmt_iterator
*iter
)
/* Bail out when instrumentation of mem* intrinsics is disabled.  */
1919 if (!ASAN_MEMINTRIN
)
1922 bool iter_advanced_p
= false;
1923 gcall
*call
= as_a
<gcall
*> (gsi_stmt (*iter
));
1925 gcc_checking_assert (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
));
1927 location_t loc
= gimple_location (call
);
1929 asan_mem_ref src0
, src1
, dest
;
1930 asan_mem_ref_init (&src0
, NULL
, 1);
1931 asan_mem_ref_init (&src1
, NULL
, 1);
1932 asan_mem_ref_init (&dest
, NULL
, 1);
1934 tree src0_len
= NULL_TREE
, src1_len
= NULL_TREE
, dest_len
= NULL_TREE
;
1935 bool src0_is_store
= false, src1_is_store
= false, dest_is_store
= false,
1936 dest_is_deref
= false, intercepted_p
= true;
/* Decompose the builtin into up to two source regions and one
   destination region; INTERCEPTED_P presumably reports whether
   libasan intercepts the callee at run time (making inline region
   checks redundant) — confirm upstream.  */
1938 if (get_mem_refs_of_builtin_call (call
,
1939 &src0
, &src0_len
, &src0_is_store
,
1940 &src1
, &src1_len
, &src1_is_store
,
1941 &dest
, &dest_len
, &dest_is_store
,
1942 &dest_is_deref
, &intercepted_p
))
/* Destination-dereference case: instrument it as a plain scalar
   dereference.  */
1946 instrument_derefs (iter
, dest
.start
, loc
, dest_is_store
);
1948 iter_advanced_p
= true;
/* Non-intercepted builtins with known region operands get explicit
   region checks for each source and the destination.  */
1950 else if (!intercepted_p
1951 && (src0_len
|| src1_len
|| dest_len
))
1953 if (src0
.start
!= NULL_TREE
)
1954 instrument_mem_region_access (src0
.start
, src0_len
,
1955 iter
, loc
, /*is_store=*/false);
1956 if (src1
.start
!= NULL_TREE
)
1957 instrument_mem_region_access (src1
.start
, src1_len
,
1958 iter
, loc
, /*is_store=*/false);
1959 if (dest
.start
!= NULL_TREE
)
1960 instrument_mem_region_access (dest
.start
, dest_len
,
1961 iter
, loc
, /*is_store=*/true);
1963 *iter
= gsi_for_stmt (call
);
1965 iter_advanced_p
= true;
/* Record the regions so later identical references in this extended
   basic block are not re-instrumented.  */
1969 if (src0
.start
!= NULL_TREE
)
1970 maybe_update_mem_ref_hash_table (src0
.start
, src0_len
);
1971 if (src1
.start
!= NULL_TREE
)
1972 maybe_update_mem_ref_hash_table (src1
.start
, src1_len
);
1973 if (dest
.start
!= NULL_TREE
)
1974 maybe_update_mem_ref_hash_table (dest
.start
, dest_len
);
1977 return iter_advanced_p
;
1980 /* Instrument the assignment statement ITER if it is subject to
1981 instrumentation. Return TRUE iff instrumentation actually
1982 happened. In that case, the iterator ITER is advanced to the next
1983 logical expression following the one initially pointed to by ITER,
1984 and the relevant memory reference that which access has been
1985 instrumented is added to the memory references hash table. */
1988 maybe_instrument_assignment (gimple_stmt_iterator
*iter
)
1990 gimple s
= gsi_stmt (*iter
);
1992 gcc_assert (gimple_assign_single_p (s
));
1994 tree ref_expr
= NULL_TREE
;
1995 bool is_store
, is_instrumented
= false;
/* A single assignment can be both a load and a store (aggregate
   copy), so the LHS and RHS are checked independently.  */
1997 if (gimple_store_p (s
))
1999 ref_expr
= gimple_assign_lhs (s
);
2001 instrument_derefs (iter
, ref_expr
,
2002 gimple_location (s
),
2004 is_instrumented
= true;
2007 if (gimple_assign_load_p (s
))
2009 ref_expr
= gimple_assign_rhs1 (s
);
2011 instrument_derefs (iter
, ref_expr
,
2012 gimple_location (s
),
2014 is_instrumented
= true;
/* On success the iterator is advanced past the (possibly replaced)
   statement before returning.  */
2017 if (is_instrumented
)
2020 return is_instrumented
;
2023 /* Instrument the function call pointed to by the iterator ITER, if it
2024 is subject to instrumentation. At the moment, the only function
2025 calls that are instrumented are some built-in functions that access
2026 memory. Look at instrument_builtin_call to learn more.
2028 Upon completion return TRUE iff *ITER was advanced to the statement
2029 following the one it was originally pointing to. */
2032 maybe_instrument_call (gimple_stmt_iterator
*iter
)
2034 gimple stmt
= gsi_stmt (*iter
);
2035 bool is_builtin
= gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
);
2037 if (is_builtin
&& instrument_builtin_call (iter
))
/* Before a noreturn call the frame is abandoned without running the
   epilogue; a call to __asan_handle_no_return is inserted first —
   presumably so libasan can unpoison the stack — confirm against
   the libasan interface.  */
2040 if (gimple_call_noreturn_p (stmt
))
2044 tree callee
= gimple_call_fndecl (stmt
);
2045 switch (DECL_FUNCTION_CODE (callee
))
2047 case BUILT_IN_UNREACHABLE
:
2049 /* Don't instrument these. */
2055 tree decl
= builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN
);
2056 gimple g
= gimple_build_call (decl
, 0);
2057 gimple_set_location (g
, gimple_location (stmt
));
2058 gsi_insert_before (iter
, g
, GSI_SAME_STMT
);
2063 /* Walk each instruction of all basic block and instrument those that
2064 represent memory references: loads, stores, or function calls.
2065 In a given basic block, this function avoids instrumenting memory
2066 references that have already been instrumented. */
2069 transform_statements (void)
2071 basic_block bb
, last_bb
= NULL
;
2072 gimple_stmt_iterator i
;
/* Basic blocks created by this pass get indexes at or above this
   snapshot; they are skipped below.  */
2073 int saved_last_basic_block
= last_basic_block_for_fn (cfun
);
2075 FOR_EACH_BB_FN (bb
, cfun
)
2077 basic_block prev_bb
= bb
;
2079 if (bb
->index
>= saved_last_basic_block
) continue;
2081 /* Flush the mem ref hash table, if current bb doesn't have
2082 exactly one predecessor, or if that predecessor (skipping
2083 over asan created basic blocks) isn't the last processed
2084 basic block. Thus we effectively flush on extended basic
2085 block boundaries. */
2086 while (single_pred_p (prev_bb
))
2088 prev_bb
= single_pred (prev_bb
);
2089 if (prev_bb
->index
< saved_last_basic_block
)
2092 if (prev_bb
!= last_bb
)
2093 empty_mem_ref_hash_table ();
/* Walk the statements; the instrument helpers advance I themselves
   when they fire, hence the empty loop-increment.  */
2096 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
);)
2098 gimple s
= gsi_stmt (i
);
2100 if (has_stmt_been_instrumented_p (s
))
2102 else if (gimple_assign_single_p (s
)
2103 && !gimple_clobber_p (s
)
2104 && maybe_instrument_assignment (&i
))
2105 /* Nothing to do as maybe_instrument_assignment advanced
2107 else if (is_gimple_call (s
) && maybe_instrument_call (&i
))
2108 /* Nothing to do as maybe_instrument_call
2109 advanced the iterator I. */;
2112 /* No instrumentation happened.
2114 If the current instruction is a function call that
2115 might free something, let's forget about the memory
2116 references that got instrumented. Otherwise we might
2117 miss some instrumentation opportunities. */
2118 if (is_gimple_call (s
) && !nonfreeing_call_p (s
))
2119 empty_mem_ref_hash_table ();
2125 free_mem_ref_resources ();
2129 __asan_before_dynamic_init (module_name)
2131 __asan_after_dynamic_init ()
/* Build a call expression for one of the two run-time hooks sketched
   above; only the "before" variant takes the module-name argument
   (see the 0-vs-1 argument count at the bottom).  */
2135 asan_dynamic_init_call (bool after_p
)
2137 tree fn
= builtin_decl_implicit (after_p
2138 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2139 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT
);
2140 tree module_name_cst
= NULL_TREE
;
/* The module name passed to the hook is the main input filename,
   turned into a string constant via the asan pretty-printer.  */
2143 pretty_printer module_name_pp
;
2144 pp_string (&module_name_pp
, main_input_filename
);
2146 if (shadow_ptr_types
[0] == NULL_TREE
)
2147 asan_init_shadow_ptr_types ();
2148 module_name_cst
= asan_pp_string (&module_name_pp
);
2149 module_name_cst
= fold_convert (const_ptr_type_node
,
2153 return build_call_expr (fn
, after_p
? 0 : 1, module_name_cst
);
2157 struct __asan_global
2161 uptr __size_with_redzone;
2163 const void *__module_name;
2164 uptr __has_dynamic_init;
2165 __asan_global_source_location *__location;
/* Build a RECORD_TYPE mirroring the run-time library's
   struct __asan_global (partially sketched above): fields 0 (__beg)
   and 3 (__name) are const pointers, the rest are pointer-sized
   integers.  */
2169 asan_global_struct (void)
2171 static const char *field_names
[7]
2172 = { "__beg", "__size", "__size_with_redzone",
2173 "__name", "__module_name", "__has_dynamic_init", "__location"};
2174 tree fields
[7], ret
;
2177 ret
= make_node (RECORD_TYPE
);
2178 for (i
= 0; i
< 7; i
++)
2181 = build_decl (UNKNOWN_LOCATION
, FIELD_DECL
,
2182 get_identifier (field_names
[i
]),
2183 (i
== 0 || i
== 3) ? const_ptr_type_node
2184 : pointer_sized_int_node
);
2185 DECL_CONTEXT (fields
[i
]) = ret
;
/* Chain each field to its predecessor so TYPE_FIELDS forms the
   ordered member list.  */
2187 DECL_CHAIN (fields
[i
- 1]) = fields
[i
];
/* Give the record a TYPE_DECL named "__asan_global", marked
   artificial and ignored so it does not show up in debug output.  */
2189 tree type_decl
= build_decl (input_location
, TYPE_DECL
,
2190 get_identifier ("__asan_global"), ret
);
2191 DECL_IGNORED_P (type_decl
) = 1;
2192 DECL_ARTIFICIAL (type_decl
) = 1;
2193 TYPE_FIELDS (ret
) = fields
[0];
2194 TYPE_NAME (ret
) = type_decl
;
2195 TYPE_STUB_DECL (ret
) = type_decl
;
2200 /* Append description of a single global DECL into vector V.
2201 TYPE is __asan_global struct type as returned by asan_global_struct. */
2204 asan_add_global (tree decl
, tree type
, vec
<constructor_elt
, va_gc
> *v
)
/* UPTR is the type of the second field of TYPE, i.e. the pointer-sized
   integer used for the size/flag members of __asan_global.  */
2206 tree init
, uptr
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
)));
2207 unsigned HOST_WIDE_INT size
;
2208 tree str_cst
, module_name_cst
, refdecl
= decl
;
2209 vec
<constructor_elt
, va_gc
> *vinner
= NULL
;
2211 pretty_printer asan_pp
, module_name_pp
;
/* Pretty-print the global's name ("<unknown>" if it has none) and the
   module name into string constants for the descriptor.  */
2213 if (DECL_NAME (decl
))
2214 pp_tree_identifier (&asan_pp
, DECL_NAME (decl
))
2216 pp_string (&asan_pp
, "<unknown>");
2217 str_cst
= asan_pp_string (&asan_pp
);
2219 pp_string (&module_name_pp
, main_input_filename
);
2220 module_name_cst
= asan_pp_string (&module_name_pp
);
/* If the global needs a local alias (see asan_needs_local_alias),
   emit a static ".LASAN<n>" alias and describe that instead of DECL,
   copying the flags callers of the descriptor may rely on.  */
2222 if (asan_needs_local_alias (decl
))
2225 ASM_GENERATE_INTERNAL_LABEL (buf
, "LASAN", vec_safe_length (v
) + 1);
2226 refdecl
= build_decl (DECL_SOURCE_LOCATION (decl
),
2227 VAR_DECL
, get_identifier (buf
), TREE_TYPE (decl
));
2228 TREE_ADDRESSABLE (refdecl
) = TREE_ADDRESSABLE (decl
);
2229 TREE_READONLY (refdecl
) = TREE_READONLY (decl
);
2230 TREE_THIS_VOLATILE (refdecl
) = TREE_THIS_VOLATILE (decl
);
2231 DECL_GIMPLE_REG_P (refdecl
) = DECL_GIMPLE_REG_P (decl
);
2232 DECL_ARTIFICIAL (refdecl
) = DECL_ARTIFICIAL (decl
);
2233 DECL_IGNORED_P (refdecl
) = DECL_IGNORED_P (decl
);
2234 TREE_STATIC (refdecl
) = 1;
2235 TREE_PUBLIC (refdecl
) = 0;
2236 TREE_USED (refdecl
) = 1;
2237 assemble_alias (refdecl
, DECL_ASSEMBLER_NAME (decl
));
/* Field __beg: address of the (possibly aliased) global.  */
2240 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
,
2241 fold_convert (const_ptr_type_node
,
2242 build_fold_addr_expr (refdecl
)));
/* Fields __size and __size_with_redzone (size plus the asan redzone
   padding computed by asan_red_zone_size).  */
2243 size
= tree_to_uhwi (DECL_SIZE_UNIT (decl
));
2244 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
, build_int_cst (uptr
, size
));
2245 size
+= asan_red_zone_size (size
);
2246 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
, build_int_cst (uptr
, size
));
/* Fields __name and __module_name.  */
2247 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
,
2248 fold_convert (const_ptr_type_node
, str_cst
));
2249 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
,
2250 fold_convert (const_ptr_type_node
, module_name_cst
));
/* Field __has_dynamic_init, taken from the varpool node if any.  */
2251 varpool_node
*vnode
= varpool_node::get (decl
);
2252 int has_dynamic_init
= vnode
? vnode
->dynamically_initialized
: 0;
2253 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
,
2254 build_int_cst (uptr
, has_dynamic_init
));
/* Field __location: if DECL has a source location, emit a static
   ".LASANLOC<n>" variable of ubsan's source-location type holding
   {file, line, column} and point at it; otherwise store 0.
   NOTE(review): the column argument of the constructor (original line
   2277) is missing from this extraction.  */
2255 tree locptr
= NULL_TREE
;
2256 location_t loc
= DECL_SOURCE_LOCATION (decl
);
2257 expanded_location xloc
= expand_location (loc
);
2258 if (xloc
.file
!= NULL
)
2260 static int lasanloccnt
= 0;
2262 ASM_GENERATE_INTERNAL_LABEL (buf
, "LASANLOC", ++lasanloccnt
);
2263 tree var
= build_decl (UNKNOWN_LOCATION
, VAR_DECL
, get_identifier (buf
),
2264 ubsan_get_source_location_type ());
2265 TREE_STATIC (var
) = 1;
2266 TREE_PUBLIC (var
) = 0;
2267 DECL_ARTIFICIAL (var
) = 1;
2268 DECL_IGNORED_P (var
) = 1;
2269 pretty_printer filename_pp
;
2270 pp_string (&filename_pp
, xloc
.file
);
2271 tree str
= asan_pp_string (&filename_pp
);
2272 tree ctor
= build_constructor_va (TREE_TYPE (var
), 3,
2273 NULL_TREE
, str
, NULL_TREE
,
2274 build_int_cst (unsigned_type_node
,
2275 xloc
.line
), NULL_TREE
,
2276 build_int_cst (unsigned_type_node
,
2278 TREE_CONSTANT (ctor
) = 1;
2279 TREE_STATIC (ctor
) = 1;
2280 DECL_INITIAL (var
) = ctor
;
2281 varpool_node::finalize_decl (var
);
2282 locptr
= fold_convert (uptr
, build_fold_addr_expr (var
));
2285 locptr
= build_int_cst (uptr
, 0);
2286 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
, locptr
);
/* Wrap the field values into one __asan_global constructor and append
   it to the caller's vector V.  */
2287 init
= build_constructor (type
, vinner
);
2288 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, init
);
2291 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2293 initialize_sanitizer_builtins (void)
/* If the front end already created the asan builtins, nothing to do.  */
2297 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT
))
/* Build the function types referenced by sanitizer.def.  NOTE(review):
   several declaration lines (e.g. 2301, 2314, 2323, 2326, 2328-2330)
   are missing from this fragmentary extraction.  */
2300 tree BT_FN_VOID
= build_function_type_list (void_type_node
, NULL_TREE
);
2302 = build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
2303 tree BT_FN_VOID_CONST_PTR
2304 = build_function_type_list (void_type_node
, const_ptr_type_node
, NULL_TREE
);
2305 tree BT_FN_VOID_PTR_PTR
2306 = build_function_type_list (void_type_node
, ptr_type_node
,
2307 ptr_type_node
, NULL_TREE
);
2308 tree BT_FN_VOID_PTR_PTR_PTR
2309 = build_function_type_list (void_type_node
, ptr_type_node
,
2310 ptr_type_node
, ptr_type_node
, NULL_TREE
);
2311 tree BT_FN_VOID_PTR_PTRMODE
2312 = build_function_type_list (void_type_node
, ptr_type_node
,
2313 pointer_sized_int_node
, NULL_TREE
);
2315 = build_function_type_list (void_type_node
, integer_type_node
, NULL_TREE
);
2316 tree BT_FN_SIZE_CONST_PTR_INT
2317 = build_function_type_list (size_type_node
, const_ptr_type_node
,
2318 integer_type_node
, NULL_TREE
);
/* Families of types indexed by access size: 1, 2, 4, 8, 16 bytes.  */
2319 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT
[5];
2320 tree BT_FN_IX_CONST_VPTR_INT
[5];
2321 tree BT_FN_IX_VPTR_IX_INT
[5];
2322 tree BT_FN_VOID_VPTR_IX_INT
[5];
/* Volatile (and, presumably, const-volatile) void pointer types.  */
2324 = build_pointer_type (build_qualified_type (void_type_node
,
2325 TYPE_QUAL_VOLATILE
));
2327 = build_pointer_type (build_qualified_type (void_type_node
,
2331 = lang_hooks
.types
.type_for_size (BOOL_TYPE_SIZE
, 1);
/* Instantiate each size-indexed family: ix is the unsigned integer
   type of BITS_PER_UNIT * 2^i bits.  */
2333 for (i
= 0; i
< 5; i
++)
2335 tree ix
= build_nonstandard_integer_type (BITS_PER_UNIT
* (1 << i
), 1);
2336 BT_FN_BOOL_VPTR_PTR_IX_INT_INT
[i
]
2337 = build_function_type_list (boolt
, vptr
, ptr_type_node
, ix
,
2338 integer_type_node
, integer_type_node
,
2340 BT_FN_IX_CONST_VPTR_INT
[i
]
2341 = build_function_type_list (ix
, cvptr
, integer_type_node
, NULL_TREE
);
2342 BT_FN_IX_VPTR_IX_INT
[i
]
2343 = build_function_type_list (ix
, vptr
, ix
, integer_type_node
,
2345 BT_FN_VOID_VPTR_IX_INT
[i
]
2346 = build_function_type_list (void_type_node
, vptr
, ix
,
2347 integer_type_node
, NULL_TREE
);
/* Aliases mapping the sized names used by sanitizer.def onto the
   arrays built above, plus the attribute lists sanitizer.def expects
   (translated into ECF_* call flags).  */
2349 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2350 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2351 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2352 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2353 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2354 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2355 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2356 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2357 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2358 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2359 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2360 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2361 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2362 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2363 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2364 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2365 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2366 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2367 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2368 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2369 #undef ATTR_NOTHROW_LEAF_LIST
2370 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2371 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2372 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2373 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2374 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2375 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2376 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2377 ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
2378 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2379 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2380 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2381 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2382 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2383 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2384 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2385 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2386 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2387 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2388 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2389 /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2390 #undef ATTR_PURE_NOTHROW_LEAF_LIST
2391 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
2392 #undef DEF_SANITIZER_BUILTIN
2393 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2394 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2395 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2396 set_call_expr_flags (decl, ATTRS); \
2397 set_builtin_decl (ENUM, decl, true);
/* Expand every DEF_SANITIZER_BUILTIN entry in sanitizer.def.  */
2399 #include "sanitizer.def"
2401 /* -fsanitize=object-size uses __builtin_object_size, but that might
2402 not be available for e.g. Fortran at this point. We use
2403 DEF_SANITIZER_BUILTIN here only as a convenience macro. */
2404 if ((flag_sanitize
& SANITIZE_OBJECT_SIZE
)
2405 && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE
))
2406 DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE
, "object_size",
2407 BT_FN_SIZE_CONST_PTR_INT
,
2408 ATTR_PURE_NOTHROW_LEAF_LIST
)
2410 #undef DEF_SANITIZER_BUILTIN
2413 /* Called via htab_traverse. Count number of emitted
2414 STRING_CSTs in the constant hash table. */
2417 count_string_csts (constant_descriptor_tree
**slot
,
2418 unsigned HOST_WIDE_INT
*data
)
2420 struct constant_descriptor_tree
*desc
= *slot
;
/* Count only string constants that were actually emitted to the
   assembly output and that asan wants to protect.  NOTE(review): the
   increment of *DATA and the return statement fall in lines missing
   from this extraction.  */
2421 if (TREE_CODE (desc
->value
) == STRING_CST
2422 && TREE_ASM_WRITTEN (desc
->value
)
2423 && asan_protect_global (desc
->value
))
2428 /* Helper structure to pass two parameters to
/* Bundles the __asan_global struct type and the destination
   constructor-elt vector for the add_string_csts traversal callback
   below (the `type' member line is missing from this extraction).  */
2431 struct asan_add_string_csts_data
2434 vec
<constructor_elt
, va_gc
> *v
;
2437 /* Called via hash_table::traverse. Call asan_add_global
2438 on emitted STRING_CSTs from the constant hash table. */
2441 add_string_csts (constant_descriptor_tree
**slot
,
2442 asan_add_string_csts_data
*aascd
)
2444 struct constant_descriptor_tree
*desc
= *slot
;
/* Same filter as count_string_csts: only emitted, asan-protected
   string constants get a global descriptor.  */
2445 if (TREE_CODE (desc
->value
) == STRING_CST
2446 && TREE_ASM_WRITTEN (desc
->value
)
2447 && asan_protect_global (desc
->value
))
/* The decl backing the constant is recovered from the SYMBOL_REF of
   its RTL; the descriptor is appended to aascd->v.  */
2449 asan_add_global (SYMBOL_REF_DECL (XEXP (desc
->rtl
, 0)),
2450 aascd
->type
, aascd
->v
);
/* Statement list accumulating the asan module-constructor body
   (built up by asan_finish_file below).  */
2455 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2456 invoke ggc_collect. */
2457 static GTY(()) tree asan_ctor_statements
;
2459 /* Module-level instrumentation.
2460 - Insert __asan_init_vN() into the list of CTORs.
2461 - TODO: insert redzones around globals.
2465 asan_finish_file (void)
2467 varpool_node
*vnode
;
2468 unsigned HOST_WIDE_INT gcount
= 0;
2470 if (shadow_ptr_types
[0] == NULL_TREE
)
2471 asan_init_shadow_ptr_types ();
2472 /* Avoid instrumenting code in the asan ctors/dtors.
2473 We don't need to insert padding after the description strings,
2474 nor after .LASAN* array. */
2475 flag_sanitize
&= ~SANITIZE_ADDRESS
;
2477 /* For user-space we want asan constructors to run first.
2478 Linux kernel does not support priorities other than default, and the only
2479 other user of constructors is coverage. So we run with the default
2481 int priority
= flag_sanitize
& SANITIZE_USER_ADDRESS
2482 ? MAX_RESERVED_INIT_PRIORITY
- 1 : DEFAULT_INIT_PRIORITY
;
/* User-space asan: start the ctor with a call to __asan_init.  */
2484 if (flag_sanitize
& SANITIZE_USER_ADDRESS
)
2486 tree fn
= builtin_decl_implicit (BUILT_IN_ASAN_INIT
);
2487 append_to_statement_list (build_call_expr (fn
, 0), &asan_ctor_statements
);
/* Count protected globals (NOTE(review): the gcount++ body line is
   missing from this extraction) plus protected emitted STRING_CSTs.  */
2489 FOR_EACH_DEFINED_VARIABLE (vnode
)
2490 if (TREE_ASM_WRITTEN (vnode
->decl
)
2491 && asan_protect_global (vnode
->decl
))
2493 hash_table
<tree_descriptor_hasher
> *const_desc_htab
= constant_pool_htab ();
2494 const_desc_htab
->traverse
<unsigned HOST_WIDE_INT
*, count_string_csts
>
/* If anything needs protecting, emit a static ".LASAN0" array of
   __asan_global descriptors and register/unregister it in the module
   ctor/dtor.  */
2498 tree type
= asan_global_struct (), var
, ctor
;
2499 tree dtor_statements
= NULL_TREE
;
2500 vec
<constructor_elt
, va_gc
> *v
;
2503 type
= build_array_type_nelts (type
, gcount
);
2504 ASM_GENERATE_INTERNAL_LABEL (buf
, "LASAN", 0);
2505 var
= build_decl (UNKNOWN_LOCATION
, VAR_DECL
, get_identifier (buf
),
2507 TREE_STATIC (var
) = 1;
2508 TREE_PUBLIC (var
) = 0;
2509 DECL_ARTIFICIAL (var
) = 1;
2510 DECL_IGNORED_P (var
) = 1;
2511 vec_alloc (v
, gcount
);
/* Fill the array: one descriptor per protected global ...  */
2512 FOR_EACH_DEFINED_VARIABLE (vnode
)
2513 if (TREE_ASM_WRITTEN (vnode
->decl
)
2514 && asan_protect_global (vnode
->decl
))
2515 asan_add_global (vnode
->decl
, TREE_TYPE (type
), v
);
/* ... and one per protected string constant.  */
2516 struct asan_add_string_csts_data aascd
;
2517 aascd
.type
= TREE_TYPE (type
);
2519 const_desc_htab
->traverse
<asan_add_string_csts_data
*, add_string_csts
>
2521 ctor
= build_constructor (type
, v
);
2522 TREE_CONSTANT (ctor
) = 1;
2523 TREE_STATIC (ctor
) = 1;
2524 DECL_INITIAL (var
) = ctor
;
2525 varpool_node::finalize_decl (var
);
/* Ctor: __asan_register_globals (&.LASAN0, gcount);
   Dtor: __asan_unregister_globals (&.LASAN0, gcount).  */
2527 tree fn
= builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS
);
2528 tree gcount_tree
= build_int_cst (pointer_sized_int_node
, gcount
);
2529 append_to_statement_list (build_call_expr (fn
, 2,
2530 build_fold_addr_expr (var
),
2532 &asan_ctor_statements
);
2534 fn
= builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS
);
2535 append_to_statement_list (build_call_expr (fn
, 2,
2536 build_fold_addr_expr (var
),
2539 cgraph_build_static_cdtor ('D', dtor_statements
, priority
);
2541 if (asan_ctor_statements
)
2542 cgraph_build_static_cdtor ('I', asan_ctor_statements
, priority
);
/* Restore the flag cleared at function entry.  */
2543 flag_sanitize
|= SANITIZE_ADDRESS
;
2546 /* Expand the ASAN_{LOAD,STORE} builtins. */
/* Expands an ASAN_CHECK internal-function call (at *ITER) either into
   a call to the asan runtime (USE_CALLS) or into an inline shadow
   memory check with a conditional branch to a reporting block.
   NOTE(review): several lines (braces, some call arguments such as the
   shadow-test operands and the second register_globals argument style
   lines) are missing from this fragmentary extraction.  */
2549 asan_expand_check_ifn (gimple_stmt_iterator
*iter
, bool use_calls
)
2551 gimple g
= gsi_stmt (*iter
);
2552 location_t loc
= gimple_location (g
);
/* Recovery mode: only when both -fsanitize and -fsanitize-recover
   include kernel-address.  */
2555 = (flag_sanitize
& flag_sanitize_recover
& SANITIZE_KERNEL_ADDRESS
) != 0;
/* Decode the ASAN_CHECK arguments: flags, base address, length,
   alignment.  */
2557 HOST_WIDE_INT flags
= tree_to_shwi (gimple_call_arg (g
, 0));
2558 gcc_assert (flags
< ASAN_CHECK_LAST
);
2559 bool is_scalar_access
= (flags
& ASAN_CHECK_SCALAR_ACCESS
) != 0;
2560 bool is_store
= (flags
& ASAN_CHECK_STORE
) != 0;
2561 bool is_non_zero_len
= (flags
& ASAN_CHECK_NON_ZERO_LEN
) != 0;
2563 tree base
= gimple_call_arg (g
, 1);
2564 tree len
= gimple_call_arg (g
, 2);
2565 HOST_WIDE_INT align
= tree_to_shwi (gimple_call_arg (g
, 3));
/* size_in_bytes is -1 for non-scalar or variable-length accesses.  */
2567 HOST_WIDE_INT size_in_bytes
2568 = is_scalar_access
&& tree_fits_shwi_p (len
) ? tree_to_shwi (len
) : -1;
2572 /* Instrument using callbacks. */
/* Callback path: compute the base address, then replace the
   ASAN_CHECK with a call to the matching __asan_* check function
   (one argument for fixed sizes, addr+size for variable sizes).  */
2573 gimple g
= gimple_build_assign (make_ssa_name (pointer_sized_int_node
),
2575 gimple_set_location (g
, loc
);
2576 gsi_insert_before (iter
, g
, GSI_SAME_STMT
);
2577 tree base_addr
= gimple_assign_lhs (g
);
2580 tree fun
= check_func (is_store
, recover_p
, size_in_bytes
, &nargs
);
2582 g
= gimple_build_call (fun
, 1, base_addr
);
2585 gcc_assert (nargs
== 2);
2586 g
= gimple_build_assign (make_ssa_name (pointer_sized_int_node
),
2588 gimple_set_location (g
, loc
);
2589 gsi_insert_before (iter
, g
, GSI_SAME_STMT
);
2590 tree sz_arg
= gimple_assign_lhs (g
);
2591 g
= gimple_build_call (fun
, nargs
, base_addr
, sz_arg
);
2593 gimple_set_location (g
, loc
);
2594 gsi_replace (iter
, g
, false);
/* Inline path.  The 16-byte shadow pointer type is used only for
   exactly-16-byte accesses.  */
2598 HOST_WIDE_INT real_size_in_bytes
= size_in_bytes
== -1 ? 1 : size_in_bytes
;
2600 tree shadow_ptr_type
= shadow_ptr_types
[real_size_in_bytes
== 16 ? 1 : 0];
2601 tree shadow_type
= TREE_TYPE (shadow_ptr_type
);
2603 gimple_stmt_iterator gsi
= *iter
;
/* If the length may be zero, guard the whole check with
   "if (len != 0)" so a zero-length access is never reported.  */
2605 if (!is_non_zero_len
)
2607 /* So, the length of the memory area to asan-protect is
2608 non-constant. Let's guard the generated instrumentation code
2613 //asan instrumentation code goes here.
2615 // falltrough instructions, starting with *ITER. */
2617 g
= gimple_build_cond (NE_EXPR
,
2619 build_int_cst (TREE_TYPE (len
), 0),
2620 NULL_TREE
, NULL_TREE
);
2621 gimple_set_location (g
, loc
);
2623 basic_block then_bb
, fallthrough_bb
;
2624 insert_if_then_before_iter (as_a
<gcond
*> (g
), iter
,
2625 /*then_more_likely_p=*/true,
2626 &then_bb
, &fallthrough_bb
);
2627 /* Note that fallthrough_bb starts with the statement that was
2628 pointed to by ITER. */
2630 /* The 'then block' of the 'if (len != 0) condition is where
2631 we'll generate the asan instrumentation code now. */
2632 gsi
= gsi_last_bb (then_bb
);
2635 /* Get an iterator on the point where we can add the condition
2636 statement for the instrumentation. */
/* Split the block: THEN_BB will hold the report call, ELSE_BB the
   original fall-through code.  With recovery the then-block also
   falls through instead of being a dead end.  */
2637 basic_block then_bb
, else_bb
;
2638 gsi
= create_cond_insert_point (&gsi
, /*before_p*/false,
2639 /*then_more_likely_p=*/false,
2640 /*create_then_fallthru_edge*/recover_p
,
/* Materialize the base address as a pointer-sized integer SSA name.  */
2644 g
= gimple_build_assign (make_ssa_name (pointer_sized_int_node
),
2646 gimple_set_location (g
, loc
);
2647 gsi_insert_before (&gsi
, g
, GSI_NEW_STMT
);
2648 tree base_addr
= gimple_assign_lhs (g
);
/* Accesses of >= 8 bytes only need "shadow != 0".  */
2651 if (real_size_in_bytes
>= 8)
2653 tree shadow
= build_shadow_mem_access (&gsi
, loc
, base_addr
,
2659 /* Slow path for 1, 2 and 4 byte accesses. */
2660 /* Test (shadow != 0)
2661 & ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow). */
2662 tree shadow
= build_shadow_mem_access (&gsi
, loc
, base_addr
,
2664 gimple shadow_test
= build_assign (NE_EXPR
, shadow
, 0);
2665 gimple_seq seq
= NULL
;
2666 gimple_seq_add_stmt (&seq
, shadow_test
);
2667 /* Aligned (>= 8 bytes) can test just
2668 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
2672 gimple_seq_add_stmt (&seq
, build_assign (BIT_AND_EXPR
,
2674 gimple_seq_add_stmt (&seq
,
2675 build_type_cast (shadow_type
,
2676 gimple_seq_last (seq
)));
2677 if (real_size_in_bytes
> 1)
2678 gimple_seq_add_stmt (&seq
,
2679 build_assign (PLUS_EXPR
,
2680 gimple_seq_last (seq
),
2681 real_size_in_bytes
- 1));
2682 t
= gimple_assign_lhs (gimple_seq_last_stmt (seq
));
2685 t
= build_int_cst (shadow_type
, real_size_in_bytes
- 1);
2686 gimple_seq_add_stmt (&seq
, build_assign (GE_EXPR
, t
, shadow
));
2687 gimple_seq_add_stmt (&seq
, build_assign (BIT_AND_EXPR
, shadow_test
,
2688 gimple_seq_last (seq
)));
2689 t
= gimple_assign_lhs (gimple_seq_last (seq
));
2690 gimple_seq_set_location (seq
, loc
);
2691 gsi_insert_seq_after (&gsi
, seq
, GSI_CONTINUE_LINKING
);
2693 /* For non-constant, misaligned or otherwise weird access sizes,
2694 check first and last byte. */
2695 if (size_in_bytes
== -1)
/* base_end_addr = base_addr + len - 1; then re-run the 1-byte shadow
   check on the last byte and OR it into T.  */
2697 g
= gimple_build_assign (make_ssa_name (pointer_sized_int_node
),
2699 build_int_cst (pointer_sized_int_node
, 1));
2700 gimple_set_location (g
, loc
);
2701 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
2702 tree last
= gimple_assign_lhs (g
);
2703 g
= gimple_build_assign (make_ssa_name (pointer_sized_int_node
),
2704 PLUS_EXPR
, base_addr
, last
);
2705 gimple_set_location (g
, loc
);
2706 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
2707 tree base_end_addr
= gimple_assign_lhs (g
);
2709 tree shadow
= build_shadow_mem_access (&gsi
, loc
, base_end_addr
,
2711 gimple shadow_test
= build_assign (NE_EXPR
, shadow
, 0);
2712 gimple_seq seq
= NULL
;
2713 gimple_seq_add_stmt (&seq
, shadow_test
);
2714 gimple_seq_add_stmt (&seq
, build_assign (BIT_AND_EXPR
,
2716 gimple_seq_add_stmt (&seq
, build_type_cast (shadow_type
,
2717 gimple_seq_last (seq
)));
2718 gimple_seq_add_stmt (&seq
, build_assign (GE_EXPR
,
2719 gimple_seq_last (seq
),
2721 gimple_seq_add_stmt (&seq
, build_assign (BIT_AND_EXPR
, shadow_test
,
2722 gimple_seq_last (seq
)));
2723 gimple_seq_add_stmt (&seq
, build_assign (BIT_IOR_EXPR
, t
,
2724 gimple_seq_last (seq
)));
2725 t
= gimple_assign_lhs (gimple_seq_last (seq
));
2726 gimple_seq_set_location (seq
, loc
);
2727 gsi_insert_seq_after (&gsi
, seq
, GSI_CONTINUE_LINKING
);
/* Branch to the reporting block when the combined test T fires.  */
2731 g
= gimple_build_cond (NE_EXPR
, t
, build_int_cst (TREE_TYPE (t
), 0),
2732 NULL_TREE
, NULL_TREE
);
2733 gimple_set_location (g
, loc
);
2734 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
2736 /* Generate call to the run-time library (e.g. __asan_report_load8). */
2737 gsi
= gsi_start_bb (then_bb
);
2739 tree fun
= report_error_func (is_store
, recover_p
, size_in_bytes
, &nargs
);
2740 g
= gimple_build_call (fun
, nargs
, base_addr
, len
);
2741 gimple_set_location (g
, loc
);
2742 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
/* Drop the original ASAN_CHECK and leave *ITER at the start of the
   fall-through block.  */
2744 gsi_remove (iter
, true);
2745 *iter
= gsi_start_bb (else_bb
);
2750 /* Instrument the current function. */
2753 asan_instrument (void)
/* Make sure the shadow pointer types exist before walking statements.  */
2755 if (shadow_ptr_types
[0] == NULL_TREE
)
2756 asan_init_shadow_ptr_types ();
2757 transform_statements ();
/* Pass gate (gate_asan -- its signature line is missing from this
   extraction): run only with -fsanitize=address and when the current
   function is not marked no_sanitize_address.  */
2764 return (flag_sanitize
& SANITIZE_ADDRESS
) != 0
2765 && !lookup_attribute ("no_sanitize_address",
2766 DECL_ATTRIBUTES (current_function_decl
));
/* Pass descriptor for the optimizing asan pass: a GIMPLE pass that
   requires SSA/CFG/EH-lowered IL and updates SSA afterwards.  */
2771 const pass_data pass_data_asan
=
2773 GIMPLE_PASS
, /* type */
2775 OPTGROUP_NONE
, /* optinfo_flags */
2776 TV_NONE
, /* tv_id */
2777 ( PROP_ssa
| PROP_cfg
| PROP_gimple_leh
), /* properties_required */
2778 0, /* properties_provided */
2779 0, /* properties_destroyed */
2780 0, /* todo_flags_start */
2781 TODO_update_ssa
, /* todo_flags_finish */
/* The asan GIMPLE pass; clonable so the pass manager can schedule it
   more than once.  Gated on gate_asan, body is asan_instrument.  */
2784 class pass_asan
: public gimple_opt_pass
2787 pass_asan (gcc::context
*ctxt
)
2788 : gimple_opt_pass (pass_data_asan
, ctxt
)
2791 /* opt_pass methods: */
2792 opt_pass
* clone () { return new pass_asan (m_ctxt
); }
2793 virtual bool gate (function
*) { return gate_asan (); }
2794 virtual unsigned int execute (function
*) { return asan_instrument (); }
2796 }; // class pass_asan
/* Factory used by passes.def to instantiate the asan pass.  */
2801 make_pass_asan (gcc::context
*ctxt
)
2803 return new pass_asan (ctxt
);
/* Pass descriptor for the -O0 variant of the asan pass; identical
   requirements to pass_data_asan.  */
2808 const pass_data pass_data_asan_O0
=
2810 GIMPLE_PASS
, /* type */
2812 OPTGROUP_NONE
, /* optinfo_flags */
2813 TV_NONE
, /* tv_id */
2814 ( PROP_ssa
| PROP_cfg
| PROP_gimple_leh
), /* properties_required */
2815 0, /* properties_provided */
2816 0, /* properties_destroyed */
2817 0, /* todo_flags_start */
2818 TODO_update_ssa
, /* todo_flags_finish */
/* -O0 variant of the asan pass: gated on !optimize (the main pass_asan
   handles optimized compilation), same execute body.  */
2821 class pass_asan_O0
: public gimple_opt_pass
2824 pass_asan_O0 (gcc::context
*ctxt
)
2825 : gimple_opt_pass (pass_data_asan_O0
, ctxt
)
2828 /* opt_pass methods: */
2829 virtual bool gate (function
*) { return !optimize
&& gate_asan (); }
2830 virtual unsigned int execute (function
*) { return asan_instrument (); }
2832 }; // class pass_asan_O0
/* Factory used by passes.def to instantiate the -O0 asan pass.  */
2837 make_pass_asan_O0 (gcc::context
*ctxt
)
2839 return new pass_asan_O0 (ctxt
);
/* Garbage-collector roots for this file (asan_ctor_statements).  */
2842 #include "gt-asan.h"