1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2014 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
26 #include "hash-table.h"
27 #include "basic-block.h"
28 #include "tree-ssa-alias.h"
29 #include "internal-fn.h"
30 #include "gimple-expr.h"
34 #include "gimple-iterator.h"
37 #include "stor-layout.h"
38 #include "tree-iterator.h"
40 #include "stringpool.h"
41 #include "tree-ssanames.h"
42 #include "tree-pass.h"
44 #include "gimple-pretty-print.h"
50 #include "langhooks.h"
51 #include "alloc-pool.h"
53 #include "gimple-builder.h"
58 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
59 with <2x slowdown on average.
61 The tool consists of two parts:
62 instrumentation module (this file) and a run-time library.
63 The instrumentation module adds a run-time check before every memory insn.
64 For a 8- or 16- byte load accessing address X:
65 ShadowAddr = (X >> 3) + Offset
66 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
68 __asan_report_load8(X);
69 For a load of N bytes (N=1, 2 or 4) from address X:
70 ShadowAddr = (X >> 3) + Offset
71 ShadowValue = *(char*)ShadowAddr;
73 if ((X & 7) + N - 1 > ShadowValue)
74 __asan_report_loadN(X);
75 Stores are instrumented similarly, but using __asan_report_storeN functions.
A call to __asan_init_vN() is inserted to the list of module CTORs.
77 N is the version number of the AddressSanitizer API. The changes between the
78 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
80 The run-time library redefines malloc (so that redzone are inserted around
81 the allocated memory) and free (so that reuse of free-ed memory is delayed),
82 provides __asan_report* and __asan_init_vN functions.
85 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
87 The current implementation supports detection of out-of-bounds and
88 use-after-free in the heap, on the stack and for global variables.
90 [Protection of stack variables]
92 To understand how detection of out-of-bounds and use-after-free works
93 for stack variables, lets look at this example on x86_64 where the
108 For this function, the stack protected by asan will be organized as
109 follows, from the top of the stack to the bottom:
111 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
113 Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
114 the next slot be 32 bytes aligned; this one is called Partial
115 Redzone; this 32 bytes alignment is an asan constraint]
117 Slot 3/ [24 bytes for variable 'a']
119 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
121 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2]
123 Slot 6/ [8 bytes for variable 'b']
125 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
128 The 32 bytes of LEFT red zone at the bottom of the stack can be
131 1/ The first 8 bytes contain a magical asan number that is always
134 2/ The following 8 bytes contains a pointer to a string (to be
135 parsed at runtime by the runtime asan library), which format is
138 "<function-name> <space> <num-of-variables-on-the-stack>
139 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
140 <length-of-var-in-bytes> ){n} "
142 where '(...){n}' means the content inside the parenthesis occurs 'n'
143 times, with 'n' being the number of variables on the stack.
145 3/ The following 8 bytes contain the PC of the current function which
146 will be used by the run-time library to print an error message.
148 4/ The following 8 bytes are reserved for internal use by the run-time.
150 The shadow memory for that stack layout is going to look like this:
152 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
153 The F1 byte pattern is a magic number called
154 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
the memory for that shadow byte is part of the LEFT red zone
156 intended to seat at the bottom of the variables on the stack.
158 - content of shadow memory 8 bytes for slots 6 and 5:
159 0xF4F4F400. The F4 byte pattern is a magic number
160 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
161 memory region for this shadow byte is a PARTIAL red zone
162 intended to pad a variable A, so that the slot following
163 {A,padding} is 32 bytes aligned.
165 Note that the fact that the least significant byte of this
166 shadow memory content is 00 means that 8 bytes of its
167 corresponding memory (which corresponds to the memory of
168 variable 'b') is addressable.
170 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
171 The F2 byte pattern is a magic number called
172 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
173 region for this shadow byte is a MIDDLE red zone intended to
174 seat between two 32 aligned slots of {variable,padding}.
176 - content of shadow memory 8 bytes for slot 3 and 2:
0xF4000000.  This represents the concatenation of
178 variable 'a' and the partial red zone following it, like what we
179 had for variable 'b'. The least significant 3 bytes being 00
180 means that the 3 bytes of variable 'a' are addressable.
182 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
183 The F3 byte pattern is a magic number called
184 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
185 region for this shadow byte is a RIGHT red zone intended to seat
186 at the top of the variables of the stack.
188 Note that the real variable layout is done in expand_used_vars in
189 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
190 stack variables as well as the different red zones, emits some
191 prologue code to populate the shadow memory as to poison (mark as
192 non-accessible) the regions of the red zones and mark the regions of
193 stack variables as accessible, and emit some epilogue code to
194 un-poison (mark as accessible) the regions of red zones right before
197 [Protection of global variables]
199 The basic idea is to insert a red zone between two global variables
200 and install a constructor function that calls the asan runtime to do
201 the populating of the relevant shadow memory regions at load time.
203 So the global variables are laid out as to insert a red zone between
204 them. The size of the red zones is so that each variable starts on a
207 Then a constructor function is installed so that, for each global
208 variable, it calls the runtime asan library function
__asan_register_globals with an instance of this type:
213 // Address of the beginning of the global variable.
216 // Initial size of the global variable.
219 // Size of the global variable + size of the red zone. This
220 // size is 32 bytes aligned.
221 uptr __size_with_redzone;
223 // Name of the global variable.
226 // Name of the module where the global variable is declared.
227 const void *__module_name;
229 // 1 if it has dynamic initialization, 0 otherwise.
230 uptr __has_dynamic_init;
233 A destructor function that calls the runtime asan library function
__asan_unregister_globals is also installed.  */
/* Alias set used for all shadow memory accesses, so that they are
   disambiguated from ordinary memory accesses.  Stays -1 until
   asan_init_shadow_ptr_types allocates a fresh alias set.  */
alias_set_type asan_shadow_set = -1;

/* Pointer types to 1 resp. 2 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[2];

/* Decl for __asan_option_detect_stack_use_after_return.  Built lazily
   the first time stack protection is emitted.  */
static GTY(()) tree asan_detect_stack_use_after_return;
/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access.  */
  HOST_WIDE_INT access_size;
};

/* Pool from which asan_mem_ref instances are allocated; released by
   free_mem_ref_resources.  */
static alloc_pool asan_mem_ref_alloc_pool;
260 /* This creates the alloc pool used to store the instances of
261 asan_mem_ref that are stored in the hash table asan_mem_ref_ht. */
264 asan_mem_ref_get_alloc_pool ()
266 if (asan_mem_ref_alloc_pool
== NULL
)
267 asan_mem_ref_alloc_pool
= create_alloc_pool ("asan_mem_ref",
268 sizeof (asan_mem_ref
),
270 return asan_mem_ref_alloc_pool
;
/* Initializes an instance of asan_mem_ref.  REF is the instance to
   initialize, START the expression for the beginning of the referenced
   region and ACCESS_SIZE the size in bytes of the access.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}
/* Allocates memory for an instance of asan_mem_ref into the memory
   pool returned by asan_mem_ref_get_alloc_pool and initialize it.
   START is the address of (or the expression pointing to) the
   beginning of memory reference.  ACCESS_SIZE is the size of the
   access to the referenced memory.  */

static asan_mem_ref*
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
{
  asan_mem_ref *ref =
    (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}
299 /* This builds and returns a pointer to the end of the memory region
300 that starts at START and of length LEN. */
303 asan_mem_ref_get_end (tree start
, tree len
)
305 if (len
== NULL_TREE
|| integer_zerop (len
))
308 return fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (start
), start
, len
);
/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}
/* Hash table descriptor for asan_mem_ref.  Entries are owned by the
   alloc pool, so the table itself must not free them — hence
   typed_noop_remove.  */
struct asan_mem_ref_hasher
  : typed_noop_remove <asan_mem_ref>
{
  typedef asan_mem_ref value_type;
  typedef asan_mem_ref compare_type;

  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
331 /* Hash a memory reference. */
334 asan_mem_ref_hasher::hash (const asan_mem_ref
*mem_ref
)
336 hashval_t h
= iterative_hash_expr (mem_ref
->start
, 0);
337 h
= iterative_hash_host_wide_int (mem_ref
->access_size
, h
);
341 /* Compare two memory references. We accept the length of either
342 memory references to be NULL_TREE. */
345 asan_mem_ref_hasher::equal (const asan_mem_ref
*m1
,
346 const asan_mem_ref
*m2
)
348 return (m1
->access_size
== m2
->access_size
349 && operand_equal_p (m1
->start
, m2
->start
, 0));
/* Hash table of the memory references that have already been
   instrumented in the current function; lazily created by
   get_mem_ref_hash_table and populated by update_mem_ref_hash_table.  */
static hash_table <asan_mem_ref_hasher> asan_mem_ref_ht;
/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table <asan_mem_ref_hasher> &
get_mem_ref_hash_table ()
{
  if (!asan_mem_ref_ht.is_created ())
    asan_mem_ref_ht.create (10);

  return asan_mem_ref_ht;
}
/* Clear all entries from the memory references hash table.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht.is_created ())
    asan_mem_ref_ht.empty ();
}
/* Free the memory references hash table and the alloc pool backing
   its entries.  */

static void
free_mem_ref_resources ()
{
  if (asan_mem_ref_ht.is_created ())
    asan_mem_ref_ht.dispose ();

  if (asan_mem_ref_alloc_pool)
    {
      /* Freeing the pool releases every asan_mem_ref at once.  */
      free_alloc_pool (asan_mem_ref_alloc_pool);
      asan_mem_ref_alloc_pool = NULL;
    }
}
/* Return true iff the memory reference REF, accessed with size
   ACCESS_SIZE, has been instrumented.  */

static bool
has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
{
  /* Build a stack-local key and probe the hash table with it.  */
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  return (get_mem_ref_hash_table ().find (&r) != NULL);
}
/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}
/* Return true iff access to memory region starting at REF and of
   length LEN has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
{
  /* First let's see if the address of the beginning of REF has been
     instrumented.  */
  if (!has_mem_ref_been_instrumented (ref))
    return false;

  if (len != NULL_TREE)
    {
      /* Let's see if the end of the region has been instrumented.  */
      if (!has_mem_ref_been_instrumented (asan_mem_ref_get_end (ref, len),
					  ref->access_size))
	return false;
    }
  return true;
}
/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Set *REF_IS_STORE to true iff the reference is a
   store.  Return true upon successful completion, false
   otherwise (i.e. the assignment is neither a real store nor a
   load).  */

static bool
get_mem_ref_of_assignment (const gimple assignment,
			   asan_mem_ref *ref,
			   bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      /* A store: the reference is the LHS.  Clobbers are ignored,
	 they only mark end of life of a variable.  */
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}
/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.

   On success, fill SRC0/SRC1/DST (and the corresponding *_LEN and
   *_IS_STORE outputs) with the source and destination references of
   CALL and return true.  *DEST_IS_DEREF is set when DST is a
   dereference of an atomic/sync builtin's address argument rather
   than a length-bounded region.  Return false for builtins this
   function does not decompose.  */

static bool
get_mem_refs_of_builtin_call (const gimple call,
			      asan_mem_ref *src0,
			      tree *src0_len,
			      bool *src0_is_store,
			      asan_mem_ref *src1,
			      tree *src1_len,
			      bool *src1_is_store,
			      asan_mem_ref *dst,
			      tree *dst_len,
			      bool *dst_is_store,
			      bool *dest_is_deref)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  /* Default access size; overridden for the atomic/sync builtins.  */
  HOST_WIDE_INT access_size = 1;

  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      /* strlen reads its argument; the number of bytes read is the
	 call's return value, so use the LHS as the length.  */
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;

      /* And now the __atomic* and __sync builtins.
	 These are handled differently from the classical memory
	 access builtins above.  */

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* fall through.  */

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:

      dest = gimple_call_arg (call, 0);
      /* DEST represents the address of a memory location.
	 instrument_derefs wants the memory location, so lets
	 dereference the address DEST before handing it to
	 instrument_derefs.  */
      if (TREE_CODE (dest) == ADDR_EXPR)
	dest = TREE_OPERAND (dest, 0);
      else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
	dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
		       dest, build_int_cst (TREE_TYPE (dest), 0));
      else
	gcc_unreachable ();

      access_size = int_size_in_bytes (TREE_TYPE (dest));
      break;

    default:
      /* The other builtins memory access are not instrumented in this
	 function because they either don't have any length parameter,
	 or their length parameter is just a limit.  */
      break;
    }

  if (len != NULL_TREE)
    {
      /* Length-bounded memop: record each non-null reference with the
	 common length.  */
      if (source0 != NULL_TREE)
	{
	  src0->start = source0;
	  src0->access_size = access_size;
	  *src0_len = len;
	  *src0_is_store = false;
	}

      if (source1 != NULL_TREE)
	{
	  src1->start = source1;
	  src1->access_size = access_size;
	  *src1_len = len;
	  *src1_is_store = false;
	}

      if (dest != NULL_TREE)
	{
	  dst->start = dest;
	  dst->access_size = access_size;
	  *dst_len = len;
	  *dst_is_store = true;
	}

      got_reference_p = true;
    }
  else if (dest)
    {
      /* Atomic/sync builtin: a single dereferenced location of
	 ACCESS_SIZE bytes, no separate length.  */
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}
/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (stmt, &r, &r_is_store))
	return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false;
      if (get_mem_refs_of_builtin_call (stmt,
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref))
	{
	  /* Instrumented only if every reference the call makes has
	     been instrumented.  */
	  if (src0.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src0, src0_len))
	    return false;

	  if (src1.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src1, src1_len))
	    return false;

	  if (dest.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&dest, dest_len))
	    return false;

	  return true;
	}
    }
  return false;
}
/* Insert a memory reference into the hash table (no-op if an equal
   reference is already present).  */

static void
update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
{
  hash_table <asan_mem_ref_hasher> ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht.find_slot (&r, INSERT);
  if (*slot == NULL)
    *slot = asan_mem_ref_new (ref, access_size);
}
/* Initialize shadow_ptr_types array: pointer-to-signed-char and
   pointer-to-short types whose pointees carry the shadow alias set,
   used for 1- and 2-byte shadow memory accesses respectively.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  /* Distinct type copies so the alias set applies only to shadow
     accesses, not to ordinary char/short accesses.  */
  shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
  shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
  shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
  shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
  initialize_sanitizer_builtins ();
}
/* Create ADDR_EXPR of STRING_CST with the PP pretty printer text.  */

static tree
asan_pp_string (pretty_printer *pp)
{
  const char *buf = pp_formatted_text (pp);
  size_t len = strlen (buf);
  /* len + 1: include the terminating NUL in the STRING_CST.  */
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
			build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
}
/* Return a CONST_INT representing 4 subsequent shadow memory bytes,
   packed into one SImode value in target byte order.  */

static rtx
asan_shadow_cst (unsigned char shadow_bytes[4])
{
  int i;
  unsigned HOST_WIDE_INT val = 0;
  /* The packing below only handles the case where word and byte
     endianness agree.  */
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
  for (i = 0; i < 4; i++)
    val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
	   << (BITS_PER_UNIT * i);
  return gen_int_mode (val, SImode);
}
/* Clear shadow memory at SHADOW_MEM, LEN bytes.  Can't call a library call
   here though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx insn, insns, top_label, end, addr, tmp, jump;

  /* First try letting clear_storage expand the clear inline.  */
  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      /* No library call was generated: the inline expansion is fine.  */
      emit_insn (insns);
      return;
    }

  /* Otherwise emit an explicit 4-bytes-at-a-time store loop.  */
  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  /* Mark the backward branch as very likely taken (80%).  */
  add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
}
/* Output the LASANPC internal label for the current function; the
   stack-protection prologue stores this label's address in the frame
   description (see asan_emit_stack_protection) so the runtime can
   report the function's PC.  */

void
asan_function_start (void)
{
  section *fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
			  current_function_funcdef_no);
}
950 /* Insert code to protect stack vars. The prologue sequence should be emitted
951 directly, epilogue sequence returned. BASE is the register holding the
952 stack base, against which OFFSETS array offsets are relative to, OFFSETS
953 array contains pairs of offsets in reverse order, always the end offset
954 of some gap that needs protection followed by starting offset,
955 and DECLS is an array of representative decls for each var partition.
956 LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
957 elements long (OFFSETS include gap before the first variable as well
958 as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
959 register which stack vars DECL_RTLs are based on. Either BASE should be
960 assigned to PBASE, when not doing use after return protection, or
961 corresponding address based on __asan_stack_malloc* return value. */
964 asan_emit_stack_protection (rtx base
, rtx pbase
, unsigned int alignb
,
965 HOST_WIDE_INT
*offsets
, tree
*decls
, int length
)
967 rtx shadow_base
, shadow_mem
, ret
, mem
, orig_base
, lab
;
969 unsigned char shadow_bytes
[4];
970 HOST_WIDE_INT base_offset
= offsets
[length
- 1];
971 HOST_WIDE_INT base_align_bias
= 0, offset
, prev_offset
;
972 HOST_WIDE_INT asan_frame_size
= offsets
[0] - base_offset
;
973 HOST_WIDE_INT last_offset
, last_size
;
975 unsigned char cur_shadow_byte
= ASAN_STACK_MAGIC_LEFT
;
976 tree str_cst
, decl
, id
;
977 int use_after_return_class
= -1;
979 if (shadow_ptr_types
[0] == NULL_TREE
)
980 asan_init_shadow_ptr_types ();
982 /* First of all, prepare the description string. */
983 pretty_printer asan_pp
;
985 pp_decimal_int (&asan_pp
, length
/ 2 - 1);
987 for (l
= length
- 2; l
; l
-= 2)
989 tree decl
= decls
[l
/ 2 - 1];
990 pp_wide_integer (&asan_pp
, offsets
[l
] - base_offset
);
992 pp_wide_integer (&asan_pp
, offsets
[l
- 1] - offsets
[l
]);
994 if (DECL_P (decl
) && DECL_NAME (decl
))
996 pp_decimal_int (&asan_pp
, IDENTIFIER_LENGTH (DECL_NAME (decl
)));
998 pp_tree_identifier (&asan_pp
, DECL_NAME (decl
));
1001 pp_string (&asan_pp
, "9 <unknown>");
1002 pp_space (&asan_pp
);
1004 str_cst
= asan_pp_string (&asan_pp
);
1006 /* Emit the prologue sequence. */
1007 if (asan_frame_size
> 32 && asan_frame_size
<= 65536 && pbase
1008 && ASAN_USE_AFTER_RETURN
)
1010 use_after_return_class
= floor_log2 (asan_frame_size
- 1) - 5;
1011 /* __asan_stack_malloc_N guarantees alignment
1012 N < 6 ? (64 << N) : 4096 bytes. */
1013 if (alignb
> (use_after_return_class
< 6
1014 ? (64U << use_after_return_class
) : 4096U))
1015 use_after_return_class
= -1;
1016 else if (alignb
> ASAN_RED_ZONE_SIZE
&& (asan_frame_size
& (alignb
- 1)))
1017 base_align_bias
= ((asan_frame_size
+ alignb
- 1)
1018 & ~(alignb
- HOST_WIDE_INT_1
)) - asan_frame_size
;
1020 /* Align base if target is STRICT_ALIGNMENT. */
1021 if (STRICT_ALIGNMENT
)
1022 base
= expand_binop (Pmode
, and_optab
, base
,
1023 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode
)
1024 << ASAN_SHADOW_SHIFT
)
1025 / BITS_PER_UNIT
), Pmode
), NULL_RTX
,
1028 if (use_after_return_class
== -1 && pbase
)
1029 emit_move_insn (pbase
, base
);
1031 base
= expand_binop (Pmode
, add_optab
, base
,
1032 gen_int_mode (base_offset
- base_align_bias
, Pmode
),
1033 NULL_RTX
, 1, OPTAB_DIRECT
);
1034 orig_base
= NULL_RTX
;
1035 if (use_after_return_class
!= -1)
1037 if (asan_detect_stack_use_after_return
== NULL_TREE
)
1039 id
= get_identifier ("__asan_option_detect_stack_use_after_return");
1040 decl
= build_decl (BUILTINS_LOCATION
, VAR_DECL
, id
,
1042 SET_DECL_ASSEMBLER_NAME (decl
, id
);
1043 TREE_ADDRESSABLE (decl
) = 1;
1044 DECL_ARTIFICIAL (decl
) = 1;
1045 DECL_IGNORED_P (decl
) = 1;
1046 DECL_EXTERNAL (decl
) = 1;
1047 TREE_STATIC (decl
) = 1;
1048 TREE_PUBLIC (decl
) = 1;
1049 TREE_USED (decl
) = 1;
1050 asan_detect_stack_use_after_return
= decl
;
1052 orig_base
= gen_reg_rtx (Pmode
);
1053 emit_move_insn (orig_base
, base
);
1054 ret
= expand_normal (asan_detect_stack_use_after_return
);
1055 lab
= gen_label_rtx ();
1056 int very_likely
= REG_BR_PROB_BASE
- (REG_BR_PROB_BASE
/ 2000 - 1);
1057 emit_cmp_and_jump_insns (ret
, const0_rtx
, EQ
, NULL_RTX
,
1058 VOIDmode
, 0, lab
, very_likely
);
1059 snprintf (buf
, sizeof buf
, "__asan_stack_malloc_%d",
1060 use_after_return_class
);
1061 ret
= init_one_libfunc (buf
);
1062 rtx addr
= convert_memory_address (ptr_mode
, base
);
1063 ret
= emit_library_call_value (ret
, NULL_RTX
, LCT_NORMAL
, ptr_mode
, 2,
1064 GEN_INT (asan_frame_size
1066 TYPE_MODE (pointer_sized_int_node
),
1068 ret
= convert_memory_address (Pmode
, ret
);
1069 emit_move_insn (base
, ret
);
1071 emit_move_insn (pbase
, expand_binop (Pmode
, add_optab
, base
,
1072 gen_int_mode (base_align_bias
1073 - base_offset
, Pmode
),
1074 NULL_RTX
, 1, OPTAB_DIRECT
));
1076 mem
= gen_rtx_MEM (ptr_mode
, base
);
1077 mem
= adjust_address (mem
, VOIDmode
, base_align_bias
);
1078 emit_move_insn (mem
, gen_int_mode (ASAN_STACK_FRAME_MAGIC
, ptr_mode
));
1079 mem
= adjust_address (mem
, VOIDmode
, GET_MODE_SIZE (ptr_mode
));
1080 emit_move_insn (mem
, expand_normal (str_cst
));
1081 mem
= adjust_address (mem
, VOIDmode
, GET_MODE_SIZE (ptr_mode
));
1082 ASM_GENERATE_INTERNAL_LABEL (buf
, "LASANPC", current_function_funcdef_no
);
1083 id
= get_identifier (buf
);
1084 decl
= build_decl (DECL_SOURCE_LOCATION (current_function_decl
),
1085 VAR_DECL
, id
, char_type_node
);
1086 SET_DECL_ASSEMBLER_NAME (decl
, id
);
1087 TREE_ADDRESSABLE (decl
) = 1;
1088 TREE_READONLY (decl
) = 1;
1089 DECL_ARTIFICIAL (decl
) = 1;
1090 DECL_IGNORED_P (decl
) = 1;
1091 TREE_STATIC (decl
) = 1;
1092 TREE_PUBLIC (decl
) = 0;
1093 TREE_USED (decl
) = 1;
1094 DECL_INITIAL (decl
) = decl
;
1095 TREE_ASM_WRITTEN (decl
) = 1;
1096 TREE_ASM_WRITTEN (id
) = 1;
1097 emit_move_insn (mem
, expand_normal (build_fold_addr_expr (decl
)));
1098 shadow_base
= expand_binop (Pmode
, lshr_optab
, base
,
1099 GEN_INT (ASAN_SHADOW_SHIFT
),
1100 NULL_RTX
, 1, OPTAB_DIRECT
);
1102 = plus_constant (Pmode
, shadow_base
,
1103 targetm
.asan_shadow_offset ()
1104 + (base_align_bias
>> ASAN_SHADOW_SHIFT
));
1105 gcc_assert (asan_shadow_set
!= -1
1106 && (ASAN_RED_ZONE_SIZE
>> ASAN_SHADOW_SHIFT
) == 4);
1107 shadow_mem
= gen_rtx_MEM (SImode
, shadow_base
);
1108 set_mem_alias_set (shadow_mem
, asan_shadow_set
);
1109 if (STRICT_ALIGNMENT
)
1110 set_mem_align (shadow_mem
, (GET_MODE_ALIGNMENT (SImode
)));
1111 prev_offset
= base_offset
;
1112 for (l
= length
; l
; l
-= 2)
1115 cur_shadow_byte
= ASAN_STACK_MAGIC_RIGHT
;
1116 offset
= offsets
[l
- 1];
1117 if ((offset
- base_offset
) & (ASAN_RED_ZONE_SIZE
- 1))
1121 = base_offset
+ ((offset
- base_offset
)
1122 & ~(ASAN_RED_ZONE_SIZE
- HOST_WIDE_INT_1
));
1123 shadow_mem
= adjust_address (shadow_mem
, VOIDmode
,
1124 (aoff
- prev_offset
)
1125 >> ASAN_SHADOW_SHIFT
);
1127 for (i
= 0; i
< 4; i
++, aoff
+= (1 << ASAN_SHADOW_SHIFT
))
1130 if (aoff
< offset
- (1 << ASAN_SHADOW_SHIFT
) + 1)
1131 shadow_bytes
[i
] = 0;
1133 shadow_bytes
[i
] = offset
- aoff
;
1136 shadow_bytes
[i
] = ASAN_STACK_MAGIC_PARTIAL
;
1137 emit_move_insn (shadow_mem
, asan_shadow_cst (shadow_bytes
));
1140 while (offset
<= offsets
[l
- 2] - ASAN_RED_ZONE_SIZE
)
1142 shadow_mem
= adjust_address (shadow_mem
, VOIDmode
,
1143 (offset
- prev_offset
)
1144 >> ASAN_SHADOW_SHIFT
);
1145 prev_offset
= offset
;
1146 memset (shadow_bytes
, cur_shadow_byte
, 4);
1147 emit_move_insn (shadow_mem
, asan_shadow_cst (shadow_bytes
));
1148 offset
+= ASAN_RED_ZONE_SIZE
;
1150 cur_shadow_byte
= ASAN_STACK_MAGIC_MIDDLE
;
1152 do_pending_stack_adjust ();
1154 /* Construct epilogue sequence. */
1158 if (use_after_return_class
!= -1)
1160 rtx lab2
= gen_label_rtx ();
1161 char c
= (char) ASAN_STACK_MAGIC_USE_AFTER_RET
;
1162 int very_likely
= REG_BR_PROB_BASE
- (REG_BR_PROB_BASE
/ 2000 - 1);
1163 emit_cmp_and_jump_insns (orig_base
, base
, EQ
, NULL_RTX
,
1164 VOIDmode
, 0, lab2
, very_likely
);
1165 shadow_mem
= gen_rtx_MEM (BLKmode
, shadow_base
);
1166 set_mem_alias_set (shadow_mem
, asan_shadow_set
);
1167 mem
= gen_rtx_MEM (ptr_mode
, base
);
1168 mem
= adjust_address (mem
, VOIDmode
, base_align_bias
);
1169 emit_move_insn (mem
, gen_int_mode (ASAN_STACK_RETIRED_MAGIC
, ptr_mode
));
1170 unsigned HOST_WIDE_INT sz
= asan_frame_size
>> ASAN_SHADOW_SHIFT
;
1171 if (use_after_return_class
< 5
1172 && can_store_by_pieces (sz
, builtin_memset_read_str
, &c
,
1173 BITS_PER_UNIT
, true))
1174 store_by_pieces (shadow_mem
, sz
, builtin_memset_read_str
, &c
,
1175 BITS_PER_UNIT
, true, 0);
1176 else if (use_after_return_class
>= 5
1177 || !set_storage_via_setmem (shadow_mem
,
1179 gen_int_mode (c
, QImode
),
1180 BITS_PER_UNIT
, BITS_PER_UNIT
,
1183 snprintf (buf
, sizeof buf
, "__asan_stack_free_%d",
1184 use_after_return_class
);
1185 ret
= init_one_libfunc (buf
);
1186 rtx addr
= convert_memory_address (ptr_mode
, base
);
1187 rtx orig_addr
= convert_memory_address (ptr_mode
, orig_base
);
1188 emit_library_call (ret
, LCT_NORMAL
, ptr_mode
, 3, addr
, ptr_mode
,
1189 GEN_INT (asan_frame_size
+ base_align_bias
),
1190 TYPE_MODE (pointer_sized_int_node
),
1191 orig_addr
, ptr_mode
);
1193 lab
= gen_label_rtx ();
1198 shadow_mem
= gen_rtx_MEM (BLKmode
, shadow_base
);
1199 set_mem_alias_set (shadow_mem
, asan_shadow_set
);
1201 if (STRICT_ALIGNMENT
)
1202 set_mem_align (shadow_mem
, (GET_MODE_ALIGNMENT (SImode
)));
1204 prev_offset
= base_offset
;
1205 last_offset
= base_offset
;
1207 for (l
= length
; l
; l
-= 2)
1209 offset
= base_offset
+ ((offsets
[l
- 1] - base_offset
)
1210 & ~(ASAN_RED_ZONE_SIZE
- HOST_WIDE_INT_1
));
1211 if (last_offset
+ last_size
!= offset
)
1213 shadow_mem
= adjust_address (shadow_mem
, VOIDmode
,
1214 (last_offset
- prev_offset
)
1215 >> ASAN_SHADOW_SHIFT
);
1216 prev_offset
= last_offset
;
1217 asan_clear_shadow (shadow_mem
, last_size
>> ASAN_SHADOW_SHIFT
);
1218 last_offset
= offset
;
1221 last_size
+= base_offset
+ ((offsets
[l
- 2] - base_offset
)
1222 & ~(ASAN_RED_ZONE_SIZE
- HOST_WIDE_INT_1
))
1227 shadow_mem
= adjust_address (shadow_mem
, VOIDmode
,
1228 (last_offset
- prev_offset
)
1229 >> ASAN_SHADOW_SHIFT
);
1230 asan_clear_shadow (shadow_mem
, last_size
>> ASAN_SHADOW_SHIFT
);
1233 do_pending_stack_adjust ();
/* Return true if DECL, a global var, might be overridden and needs
   therefore a local alias.  */

/* NOTE(review): the return-type line and braces appear to have been
   dropped by extraction -- verify against the original file.  */
asan_needs_local_alias (tree decl)
  /* A weak symbol, or one the target hook says does not bind locally,
     can be interposed at link/load time, so the protected definition
     needs a stable local alias to refer to.  */
  return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */

/* NOTE(review): local declarations and several early-return lines were
   dropped by extraction; gaps are marked with [dropped] comments below --
   verify against the original file.  */
asan_protect_global (tree decl)
{
  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
	 by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
	/* [dropped: result returned for the STRING_CST case]  */
      /* [dropped]  */
    }
  if (TREE_CODE (decl) != VAR_DECL
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      /* No RTL yet means the variable won't be emitted here.  */
      || !DECL_RTL_SET_P (decl)
      /* Comdat vars pose an ABI problem, we can't know if
	 the var that is selected by the linker will have
	 padding or not (comment tail reconstructed -- verify).  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect if using user section, often vars placed
	 into user section from multiple TUs are then assumed
	 to be an array of such vars, putting padding in there
	 breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL_TREE
	  && !DECL_HAS_IMPLICIT_SECTION_NAME_P (decl))
      || DECL_SIZE (decl) == 0
      /* The red zone must itself be representable in the object file.  */
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE)
    /* [dropped: early return]  */

  rtl = DECL_RTL (decl);
  /* Only symbols backed by a MEM with a SYMBOL_REF address can be
     given a red zone.  */
  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
    /* [dropped: early return]  */
  symbol = XEXP (rtl, 0);

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    /* [dropped: early return]  */

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    /* [dropped: early return]  */

#ifndef ASM_OUTPUT_DEF
  /* Without assembler alias support, a global needing a local alias
     (see asan_needs_local_alias) cannot be protected.  */
  if (asan_needs_local_alias (decl))
    /* [dropped: early return; trailing #endif and final return also
       dropped by extraction]  */
#endif
}
/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).
   SIZE_IN_BYTES selects the sized variant; SLOW_P presumably selects the
   _N variant as well -- the condition line mentioning it appears to have
   been dropped by extraction (marked below).  */

report_error_func (bool is_store, HOST_WIDE_INT size_in_bytes, bool slow_p)
{
  /* Table indexed by [is_store][log2(size)], with slot 5 holding the
     generic _N report function.  */
  static enum built_in_function report[2][6]
    = { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
	  BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
	  BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
	{ BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
	  BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
	  BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } };
  /* Non-power-of-two or over-wide sizes have no dedicated builtin;
     fall back to the _N variant which takes an explicit size.  */
  if ((size_in_bytes & (size_in_bytes - 1)) != 0
      || size_in_bytes > 16
      /* [dropped: one more condition line here -- likely "|| slow_p";
	 verify against the original file] */)
    return builtin_decl_implicit (report[is_store][5]);
  return builtin_decl_implicit (report[is_store][exact_log2 (size_in_bytes)]);
}
/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to point to always point to the first statement
   of the basic block *FALLTHROUGH_BLOCK.  That statement is the
   same as what ITER was pointing to prior to calling this function,
   if BEFORE_P is true; otherwise, it is its following statement.  */

/* NOTE(review): a parameter line (presumably "bool before_p") and a few
   body lines were dropped by extraction; gaps marked below.  */
gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
			  /* [dropped parameter line]  */
			  bool then_more_likely_p,
			  bool create_then_fallthru_edge,
			  basic_block *then_block,
			  basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  /* before_p is referenced here but its declaration line was dropped.  */
  if (!gsi_end_p (gsi) && before_p)
    /* [dropped: statement body of this if]  */

  basic_block cur_bb = gsi_bb (*iter);

  /* Split at GSI: everything from that statement onward moves to a new
     successor block.  */
  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  /* [dropped: guard line(s) before the loop bookkeeping below]  */
  add_bb_to_loop (then_bb, cond_bb->loop_father);
  loops_state_set (LOOPS_NEED_FIXUP);

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
      ? PROB_VERY_UNLIKELY
      : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     the block split above.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  /* The caller inserts the condition at the end of the (now truncated)
     original block.  */
  return gsi_last_bb (cond_bb);
}
/* Insert an if condition followed by a 'then block' right before the
   statement pointed to by ITER.  The fallthrough block -- which is the
   else block of the condition as well as the destination of the
   outcoming edge of the 'then block' -- starts with the statement
   pointed to by ITER.

   COND is the condition of the if.

   If THEN_MORE_LIKELY_P is true, the probability of the edge to the
   'then block' is higher than the probability of the edge to the
   fallthrough block.

   Upon completion of the function, *THEN_BB is set to the newly
   inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
   fallthrough block.

   *ITER is adjusted to still point to the same statement it was
   pointing to initially.  */

/* NOTE(review): two argument lines of the create_cond_insert_point call
   were dropped by extraction (marked below).  */
insert_if_then_before_iter (gimple cond,
			    gimple_stmt_iterator *iter,
			    bool then_more_likely_p,
			    basic_block *then_bb,
			    basic_block *fallthrough_bb)
{
  gimple_stmt_iterator cond_insert_point =
    create_cond_insert_point (iter,
			      /* [dropped argument lines -- presumably
				 before_p and then_more_likely_p]  */
			      /*create_then_fallthru_edge=*/true,
			      /* [dropped argument line(s)]  */);
  /* Place COND at the insertion point prepared above.  */
  gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}
/* Emit GIMPLE after *GSI that computes and loads the shadow byte for
   BASE_ADDR, i.e. the value at address
   (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset ().
   (Leading comment line was lost in extraction -- wording reconstructed.)
   SHADOW_PTR_TYPE is the pointer type used for the shadow access;
   returns the SSA name holding the loaded shadow value.  */

build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
			 tree base_addr, tree shadow_ptr_type)
{
  tree t, uintptr_type = TREE_TYPE (base_addr);
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  /* [dropped: declaration line, presumably the gimple 'g' used below]  */

  /* shifted = base_addr >> ASAN_SHADOW_SHIFT  */
  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign_with_ops (RSHIFT_EXPR,
				    make_ssa_name (uintptr_type, NULL),
				    /* [dropped operand line]  */);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  /* sum = shifted + targetm.asan_shadow_offset ()  */
  t = build_int_cst (uintptr_type, targetm.asan_shadow_offset ());
  g = gimple_build_assign_with_ops (PLUS_EXPR,
				    make_ssa_name (uintptr_type, NULL),
				    gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  /* Convert the integer sum to the shadow pointer type.  */
  g = gimple_build_assign_with_ops (NOP_EXPR,
				    make_ssa_name (shadow_ptr_type, NULL),
				    gimple_assign_lhs (g), NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  /* Load the shadow byte: *(shadow_ptr_type *)sum  */
  t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
	      build_int_cst (shadow_ptr_type, 0));
  g = gimple_build_assign_with_ops (MEM_REF,
				    make_ssa_name (shadow_type, NULL),
				    /* [dropped operand line]  */);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
/* Instrument the memory access instruction BASE.  Insert new
   statements before or after ITER.

   Note that the memory access represented by BASE can be either an
   SSA_NAME, or a non-SSA expression.  LOCATION is the source code
   location.  IS_STORE is TRUE for a store, FALSE for a load.
   BEFORE_P is TRUE for inserting the instrumentation code before
   ITER, FALSE for inserting it after ITER.

   If BEFORE_P is TRUE, *ITER is arranged to still point to the
   statement it was pointing to prior to calling this function,
   otherwise, it points to the statement logically following it.  */

/* NOTE(review): numerous lines (declarations, operands, branch bodies)
   were dropped by extraction; each gap is marked.  Verify against the
   original file before relying on the exact control flow.  */
build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
		  bool before_p, bool is_store, HOST_WIDE_INT size_in_bytes,
		  bool slow_p = false)
{
  gimple_stmt_iterator gsi;
  basic_block then_bb, else_bb;
  tree t, base_addr, shadow;
  /* [dropped: declaration line(s), presumably gimple 'g']  */
  /* 16-byte accesses use the second shadow pointer type (short load).  */
  tree shadow_ptr_type = shadow_ptr_types[size_in_bytes == 16 ? 1 : 0];
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  /* [dropped: declaration start, presumably "tree uintptr_type"]  */
    = build_nonstandard_integer_type (TYPE_PRECISION (TREE_TYPE (base)), 1);
  tree base_ssa = base;
  HOST_WIDE_INT real_size_in_bytes = size_in_bytes;
  tree sz_arg = NULL_TREE;

  if (size_in_bytes == 1)
    /* [dropped: statement]  */
  /* Odd sizes and over-wide accesses are checked byte-wise via the
     slow path below.  */
  else if ((size_in_bytes & (size_in_bytes - 1)) != 0
	   || size_in_bytes > 16
	   /* [dropped: condition line, likely "|| slow_p"] */)
    {
      real_size_in_bytes = 1;
      /* [dropped: statement, likely setting slow_p]  */
    }

  /* Get an iterator on the point where we can add the condition
     statement for the instrumentation.  */
  gsi = create_cond_insert_point (iter, before_p,
				  /*then_more_likely_p=*/false,
				  /*create_then_fallthru_edge=*/false,
				  /* [dropped: &then_bb/&else_bb argument
				     lines -- verify]  */);

  base = unshare_expr (base);

  /* BASE can already be an SSA_NAME; in that case, do not create a
     new SSA_NAME for it.  */
  if (TREE_CODE (base) != SSA_NAME)
    {
      g = gimple_build_assign_with_ops (TREE_CODE (base),
					make_ssa_name (TREE_TYPE (base), NULL),
					/* [dropped operand line]  */);
      gimple_set_location (g, location);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      base_ssa = gimple_assign_lhs (g);
    }

  /* base_addr = (uintptr) base  */
  g = gimple_build_assign_with_ops (NOP_EXPR,
				    make_ssa_name (uintptr_type, NULL),
				    base_ssa, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
  base_addr = gimple_assign_lhs (g);

  /* Load the shadow byte for
     (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset ().  */
  shadow = build_shadow_mem_access (&gsi, location, base_addr,
				    /* [dropped argument line]  */);

  if (real_size_in_bytes < 8)
    {
      /* Slow path for 1, 2 and 4 byte accesses: the access is bad when
	 shadow is nonzero
	 & ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow).  */
      gimple_seq seq = NULL;
      gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
      gimple_seq_add_stmt (&seq, shadow_test);
      gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, base_addr, 7));
      gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
						  gimple_seq_last (seq)));
      if (real_size_in_bytes > 1)
	gimple_seq_add_stmt (&seq,
			     build_assign (PLUS_EXPR, gimple_seq_last (seq),
					   real_size_in_bytes - 1));
      gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, gimple_seq_last (seq),
					       /* [dropped operand line]  */));
      gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
					       gimple_seq_last (seq)));
      t = gimple_assign_lhs (gimple_seq_last (seq));
      gimple_seq_set_location (seq, location);
      gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
      /* For weird access sizes or misaligned, check first and last byte.  */
      /* [dropped: guard line(s) around the block below]  */
	  g = gimple_build_assign_with_ops (PLUS_EXPR,
					    make_ssa_name (uintptr_type, NULL),
					    /* [dropped operand line]  */
					    build_int_cst (uintptr_type,
							   size_in_bytes - 1));
	  gimple_set_location (g, location);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree base_end_addr = gimple_assign_lhs (g);

	  /* Shadow byte for the LAST byte of the access.  */
	  shadow = build_shadow_mem_access (&gsi, location, base_end_addr,
					    /* [dropped argument line]  */);
	  shadow_test = build_assign (NE_EXPR, shadow, 0);
	  gimple_seq_add_stmt (&seq, shadow_test);
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   /* [dropped operand line]  */));
	  gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
						      gimple_seq_last (seq)));
	  gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
						   gimple_seq_last (seq),
						   /* [dropped operand line]  */));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
						   gimple_seq_last (seq)));
	  /* OR the first-byte check (in T) with the last-byte check.  */
	  gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
						   gimple_seq_last (seq)));
	  t = gimple_assign_lhs (gimple_seq_last (seq));
	  gimple_seq_set_location (seq, location);
	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
	  /* The _N report function takes the access size explicitly.  */
	  sz_arg = build_int_cst (pointer_sized_int_node, size_in_bytes);
    }
  /* [dropped: else branch lines between here and the condition below]  */

  /* Branch to the report block when the combined check T is nonzero.  */
  g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
			 NULL_TREE, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Generate call to the run-time library (e.g. __asan_report_load8).  */
  gsi = gsi_start_bb (then_bb);
  g = gimple_build_call (report_error_func (is_store, size_in_bytes, slow_p),
			 sz_arg ? 2 : 1, base_addr, sz_arg);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  *iter = gsi_start_bb (else_bb);
}
/* If T represents a memory access, add instrumentation code before ITER.
   LOCATION is source code location.
   IS_STORE is either TRUE (for a store) or FALSE (for a load).  */

/* NOTE(review): switch case labels, several early returns and a few
   statements were dropped by extraction; gaps are marked.  */
instrument_derefs (gimple_stmt_iterator *iter, tree t,
		   location_t location, bool is_store)
{
  /* Respect the -fsanitize knobs that disable write/read checks.  */
  if (is_store && !ASAN_INSTRUMENT_WRITES)
    /* [dropped: early return]  */
  if (!is_store && !ASAN_INSTRUMENT_READS)
    /* [dropped: early return]  */

  /* [dropped: declaration line(s), 'type' and 'base' are used below]  */
  HOST_WIDE_INT size_in_bytes;

  type = TREE_TYPE (t);
  switch (TREE_CODE (t))
    /* [dropped: the case labels filtering which reference codes are
       instrumented, and the default exit -- verify]  */

  size_in_bytes = int_size_in_bytes (type);
  if (size_in_bytes <= 0)
    /* [dropped: early return]  */

  HOST_WIDE_INT bitsize, bitpos;
  /* [dropped: declaration line, 'offset' is used below]  */
  enum machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
				    &mode, &unsignedp, &volatilep, false);
  /* A misaligned or non-byte-sized piece (e.g. a bit-field) cannot be
     checked directly.  */
  if (((size_in_bytes & (size_in_bytes - 1)) == 0
       && (bitpos % (size_in_bytes * BITS_PER_UNIT)))
      || bitsize != size_in_bytes * BITS_PER_UNIT)
    {
      /* For a bit-field member, instrument the access through its
	 representative field instead.  */
      if (TREE_CODE (t) == COMPONENT_REF
	  && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
	{
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
	  instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
					   TREE_OPERAND (t, 0), repr,
					   NULL_TREE), location, is_store);
	}
      /* [dropped: return]  */
    }

  if (bitpos % BITS_PER_UNIT)
    /* [dropped: early return]  */

  /* Accesses provably inside a known variable may be skippable.  */
  if (TREE_CODE (inner) == VAR_DECL
      && offset == NULL_TREE
      /* [dropped: one more condition line -- verify]  */
      && DECL_SIZE (inner)
      && tree_fits_shwi_p (DECL_SIZE (inner))
      && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
    {
      if (DECL_THREAD_LOCAL_P (inner))
	/* [dropped: early return]  */
      if (!TREE_STATIC (inner))
	{
	  /* Automatic vars in the current function will be always
	     accessible.  */
	  if (decl_function_context (inner) == current_function_decl)
	    /* [dropped: early return]  */
	}
      /* Always instrument external vars, they might be dynamically
	 initialized.  */
      else if (!DECL_EXTERNAL (inner))
	{
	  /* For static vars if they are known not to be dynamically
	     initialized, they will be always accessible.  */
	  varpool_node *vnode = varpool_get_node (inner);
	  if (vnode && !vnode->dynamically_initialized)
	    /* [dropped: early return]  */
	}
    }

  base = build_fold_addr_expr (t);
  if (!has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      bool slow_p = false;
      if (size_in_bytes > 1)
	{
	  /* Non-power-of-two or over-wide sizes need the byte-wise
	     slow path.  */
	  if ((size_in_bytes & (size_in_bytes - 1)) != 0
	      || size_in_bytes > 16)
	    /* [dropped: statement, likely setting slow_p]  */
	  else
	    {
	      unsigned int align = get_object_alignment (t);
	      if (align < size_in_bytes * BITS_PER_UNIT)
		{
		  /* On non-strict alignment targets, if
		     16-byte access is just 8-byte aligned,
		     this will result in misaligned shadow
		     memory 2 byte load, but otherwise can
		     be handled using one read.  */
		  if (size_in_bytes != 16
		      /* [dropped: condition line -- verify]  */
		      || align < 8 * BITS_PER_UNIT)
		    /* [dropped: statement, likely setting slow_p]  */
		}
	    }
	}
      build_check_stmt (location, base, iter, /*before_p=*/true,
			is_store, size_in_bytes, slow_p);
      /* Remember both the address form and the reference itself so the
	 same access is not instrumented twice in this BB.  */
      update_mem_ref_hash_table (base, size_in_bytes);
      update_mem_ref_hash_table (t, size_in_bytes);
    }
}
/* Instrument an access to a contiguous memory region that starts at
   the address pointed to by BASE, over a length of LEN (expressed in
   the sizeof (*BASE) bytes).  ITER points to the instruction before
   which the instrumentation instructions must be inserted.  LOCATION
   is the source location that the instrumentation instructions must
   have.  If IS_STORE is true, then the memory access is a store;
   otherwise, it's a load.  */

/* NOTE(review): several statements (early returns, operand lines and
   assignments) were dropped by extraction; gaps are marked.  */
instrument_mem_region_access (tree base, tree len,
			      gimple_stmt_iterator *iter,
			      location_t location, bool is_store)
{
  /* Nothing to do for a non-pointer base, a non-integral length, or a
     literal zero length.  */
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len))
      || integer_zerop (len))
    /* [dropped: early return]  */

  gimple_stmt_iterator gsi = *iter;

  basic_block fallthrough_bb = NULL, then_bb = NULL;

  /* If the beginning of the memory region has already been
     instrumented, do not instrument it.  */
  bool start_instrumented = has_mem_ref_been_instrumented (base, 1);

  /* If the end of the memory region has already been instrumented, do
     not instrument it.  */
  tree end = asan_mem_ref_get_end (base, len);
  bool end_instrumented = has_mem_ref_been_instrumented (end, 1);

  if (start_instrumented && end_instrumented)
    /* [dropped: early return]  */

  if (!is_gimple_constant (len))
    {
      /* So, the length of the memory area to asan-protect is
	 non-constant.  Let's guard the generated instrumentation code
	 like:

	 if (len != 0)
	   {
	     //asan instrumentation code goes here.
	   }
	 // falltrough instructions, starting with *ITER.  */

      gimple g = gimple_build_cond (NE_EXPR,
				    /* [dropped operand line]  */
				    build_int_cst (TREE_TYPE (len), 0),
				    NULL_TREE, NULL_TREE);
      gimple_set_location (g, location);
      insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
				  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
	 pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0) condition is where
	 we'll generate the asan instrumentation code now.  */
      gsi = gsi_last_bb (then_bb);
    }

  if (!start_instrumented)
    {
      /* Instrument the beginning of the memory region to be accessed,
	 and arrange for the rest of the intrumentation code to be
	 inserted in the then block *after* the current gsi.  */
      build_check_stmt (location, base, &gsi, /*before_p=*/true, is_store, 1);

      /* [dropped: branch line(s) here -- the two alternatives below
	 presumably depend on whether then_bb was created]  */
	/* We are in the case where the length of the region is not
	   constant; so instrumentation code is being generated in the
	   'then block' of the 'if (len != 0) condition.  Let's arrange
	   for the subsequent instrumentation statements to go in the
	   'then block'.  */
	gsi = gsi_last_bb (then_bb);

	  /* [dropped: statement line(s)]  */
	  /* Don't remember this access as instrumented, if length
	     is unknown.  It might be zero and not being actually
	     instrumented, so we can't rely on it being instrumented.  */
	  update_mem_ref_hash_table (base, 1);
    }

  if (end_instrumented)
    /* [dropped: early return]  */

  /* We want to instrument the access at the end of the memory region,
     which is at (base + len - 1).  */

  /* offset = len - 1;  */
  len = unshare_expr (len);
  /* [dropped: declaration line, 'offset' is assigned below]  */
  gimple_seq seq = NULL;
  if (TREE_CODE (len) == INTEGER_CST)
    offset = fold_build2 (MINUS_EXPR, size_type_node,
			  fold_convert (size_type_node, len),
			  build_int_cst (size_type_node, 1));
  /* [dropped: else branch opening and local declarations]  */

      /* Force LEN into an SSA name first if needed.  */
      if (TREE_CODE (len) != SSA_NAME)
	{
	  t = make_ssa_name (TREE_TYPE (len), NULL);
	  g = gimple_build_assign_with_ops (TREE_CODE (len), t, len, NULL);
	  gimple_set_location (g, location);
	  gimple_seq_add_stmt_without_update (&seq, g);
	  /* [dropped: statement, presumably len = t]  */
	}
      /* Then convert it to size_type_node if needed.  */
      if (!useless_type_conversion_p (size_type_node, TREE_TYPE (len)))
	{
	  t = make_ssa_name (size_type_node, NULL);
	  g = gimple_build_assign_with_ops (NOP_EXPR, t, len, NULL);
	  gimple_set_location (g, location);
	  gimple_seq_add_stmt_without_update (&seq, g);
	  /* [dropped: statement, presumably len = t]  */
	}

      t = make_ssa_name (size_type_node, NULL);
      g = gimple_build_assign_with_ops (MINUS_EXPR, t, len,
					build_int_cst (size_type_node, 1));
      gimple_set_location (g, location);
      gimple_seq_add_stmt_without_update (&seq, g);
      offset = gimple_assign_lhs (g);

  /* _1 = base;  (region_end's declaration line was dropped)  */
  base = unshare_expr (base);
    gimple_build_assign_with_ops (TREE_CODE (base),
				  make_ssa_name (TREE_TYPE (base), NULL),
				  /* [dropped operand line]  */);
  gimple_set_location (region_end, location);
  gimple_seq_add_stmt_without_update (&seq, region_end);

  /* _2 = _1 + offset;  */
    gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
				  make_ssa_name (TREE_TYPE (base), NULL),
				  gimple_assign_lhs (region_end),
				  /* [dropped operand line]  */);
  gimple_set_location (region_end, location);
  gimple_seq_add_stmt_without_update (&seq, region_end);
  gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  /* instrument access at _2;  */
  gsi = gsi_for_stmt (region_end);
  build_check_stmt (location, gimple_assign_lhs (region_end),
		    &gsi, /*before_p=*/false, is_store, 1);

  if (then_bb == NULL)
    update_mem_ref_hash_table (end, 1);

  /* Refresh *ITER in case the underlying statement was resplit.  */
  *iter = gsi_for_stmt (gsi_stmt (*iter));
}
/* Instrument the call (to the builtin strlen function) pointed to by
   ITER.

   This function instruments the access to the first byte of the
   argument, right before the call.  After the call it instruments the
   access to the last byte of the argument; it uses the result of the
   call to deduce the offset of that last byte.

   Upon completion, iff the call has actually been instrumented, this
   function returns TRUE and *ITER points to the statement logically
   following the built-in strlen function call *ITER was initially
   pointing to.  Otherwise, the function returns FALSE and *ITER
   remains unchanged.  */

/* NOTE(review): a few operand lines, the early bail-out for a cleared
   LHS, and the final return were dropped by extraction; gaps marked.  */
instrument_strlen_call (gimple_stmt_iterator *iter)
{
  gimple call = gsi_stmt (*iter);
  gcc_assert (is_gimple_call (call));

  tree callee = gimple_call_fndecl (call);
  gcc_assert (is_builtin_fn (callee)
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN);

  tree len = gimple_call_lhs (call);
  /* [dropped: lines here]  */
  /* Some passes might clear the return value of the strlen call;
     bail out in that case.  Return FALSE as we are not advancing
     *ITER.  (The bail-out statement itself was dropped.)  */
  gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (len)));

  location_t loc = gimple_location (call);
  tree str_arg = gimple_call_arg (call, 0);

  /* Instrument the access to the first byte of str_arg.  i.e:

     _1 = str_arg; instrument (_1);  */
  tree cptr_type = build_pointer_type (char_type_node);
  gimple str_arg_ssa =
    gimple_build_assign_with_ops (NOP_EXPR,
				  make_ssa_name (cptr_type, NULL),
				  /* [dropped operand line]  */);
  gimple_set_location (str_arg_ssa, loc);
  gimple_stmt_iterator gsi = *iter;
  gsi_insert_before (&gsi, str_arg_ssa, GSI_NEW_STMT);
  build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa), &gsi,
		    /*before_p=*/false, /*is_store=*/false, 1);

  /* If we initially had an instruction like:

	 int n = strlen (str)

     we now want to instrument the access to str[n], after the
     instruction above.  */

  /* So let's build the access to str[n] that is, access through the
     pointer_plus expr: (_1 + len).  */
    gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
				  make_ssa_name (cptr_type, NULL),
				  gimple_assign_lhs (str_arg_ssa),
				  /* [dropped operand line]  */);
  gimple_set_location (stmt, loc);
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);

  build_check_stmt (loc, gimple_assign_lhs (stmt), &gsi,
		    /*before_p=*/false, /*is_store=*/false, 1);

  /* Ensure that iter points to the statement logically following the
     one it was initially pointing to.  (Advance statement dropped.)  */

  /* As *ITER has been advanced to point to the next statement, let's
     return true to inform transform_statements that it shouldn't
     advance *ITER anymore; otherwises it will skip that next
     statement, which wouldn't be instrumented.  */
}
/* Instrument the call to a built-in memory access function that is
   pointed to by the iterator ITER.

   Upon completion, return TRUE iff *ITER has been advanced to the
   statement following the one it was originally pointing to.  */

/* NOTE(review): several lines (early return, one out-argument, branch
   guards and iterator advances) were dropped by extraction; gaps
   marked.  */
instrument_builtin_call (gimple_stmt_iterator *iter)
{
  /* Builtin mem* instrumentation can be disabled by a param.  */
  if (!ASAN_MEMINTRIN)
    /* [dropped: early return]  */

  bool iter_advanced_p = false;
  gimple call = gsi_stmt (*iter);

  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  location_t loc = gimple_location (call);

  /* strlen gets its own dedicated instrumentation.  */
  if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN)
    iter_advanced_p = instrument_strlen_call (iter);
  /* [dropped: else branch opening]  */
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false;

      /* Decompose the builtin into up to two source regions and one
	 destination region.  */
      if (get_mem_refs_of_builtin_call (call,
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					/* [dropped: last out-argument,
					   presumably &dest_is_deref]  */))
	  /* [dropped: guard line(s); the deref case below is reached
	     via dest_is_deref -- verify]  */
	      instrument_derefs (iter, dest.start, loc, dest_is_store);
	      /* [dropped: iterator advance]  */
	      iter_advanced_p = true;

	  else if (src0_len || src1_len || dest_len)
	    {
	      if (src0.start != NULL_TREE)
		instrument_mem_region_access (src0.start, src0_len,
					      iter, loc, /*is_store=*/false);
	      if (src1.start != NULL_TREE)
		instrument_mem_region_access (src1.start, src1_len,
					      iter, loc, /*is_store=*/false);
	      if (dest.start != NULL_TREE)
		instrument_mem_region_access (dest.start, dest_len,
					      iter, loc, /*is_store=*/true);
	      /* The region instrumentation may have resplit blocks;
		 re-anchor the iterator on the original call.  */
	      *iter = gsi_for_stmt (call);
	      /* [dropped: iterator advance]  */
	      iter_advanced_p = true;
	    }

  return iter_advanced_p;
}
/* Instrument the assignment statement ITER if it is subject to
   instrumentation.  Return TRUE iff instrumentation actually
   happened.  In that case, the iterator ITER is advanced to the next
   logical expression following the one initially pointed to by ITER,
   and the relevant memory reference that which access has been
   instrumented is added to the memory references hash table.  */

/* NOTE(review): the is_store assignments, one call argument line and
   the iterator-advance statement were dropped by extraction; gaps
   marked.  */
maybe_instrument_assignment (gimple_stmt_iterator *iter)
{
  gimple s = gsi_stmt (*iter);

  gcc_assert (gimple_assign_single_p (s));

  tree ref_expr = NULL_TREE;
  bool is_store, is_instrumented = false;

  /* The LHS of a store is the memory reference to check.  */
  if (gimple_store_p (s))
    {
      ref_expr = gimple_assign_lhs (s);
      /* [dropped: statement, presumably is_store = true]  */
      instrument_derefs (iter, ref_expr,
			 gimple_location (s),
			 /* [dropped argument line]  */);
      is_instrumented = true;
    }

  /* The RHS of a load is the memory reference to check; a statement
     can be both a load and a store.  */
  if (gimple_assign_load_p (s))
    {
      ref_expr = gimple_assign_rhs1 (s);
      /* [dropped: statement, presumably is_store = false]  */
      instrument_derefs (iter, ref_expr,
			 gimple_location (s),
			 /* [dropped argument line]  */);
      is_instrumented = true;
    }

  if (is_instrumented)
    /* [dropped: statement, presumably advancing *ITER]  */

  return is_instrumented;
}
2111 /* Instrument the function call pointed to by the iterator ITER, if it
2112 is subject to instrumentation. At the moment, the only function
2113 calls that are instrumented are some built-in functions that access
2114 memory. Look at instrument_builtin_call to learn more.
2116 Upon completion return TRUE iff *ITER was advanced to the statement
2117 following the one it was originally pointing to. */
2120 maybe_instrument_call (gimple_stmt_iterator
*iter
)
2122 gimple stmt
= gsi_stmt (*iter
);
2123 bool is_builtin
= gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
);
2125 if (is_builtin
&& instrument_builtin_call (iter
))
2128 if (gimple_call_noreturn_p (stmt
))
2132 tree callee
= gimple_call_fndecl (stmt
);
2133 switch (DECL_FUNCTION_CODE (callee
))
2135 case BUILT_IN_UNREACHABLE
:
2137 /* Don't instrument these. */
2141 tree decl
= builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN
);
2142 gimple g
= gimple_build_call (decl
, 0);
2143 gimple_set_location (g
, gimple_location (stmt
));
2144 gsi_insert_before (iter
, g
, GSI_SAME_STMT
);
2149 /* Walk each instruction of all basic block and instrument those that
2150 represent memory references: loads, stores, or function calls.
2151 In a given basic block, this function avoids instrumenting memory
2152 references that have already been instrumented. */
2155 transform_statements (void)
2157 basic_block bb
, last_bb
= NULL
;
2158 gimple_stmt_iterator i
;
2159 int saved_last_basic_block
= last_basic_block_for_fn (cfun
);
2161 FOR_EACH_BB_FN (bb
, cfun
)
2163 basic_block prev_bb
= bb
;
2165 if (bb
->index
>= saved_last_basic_block
) continue;
2167 /* Flush the mem ref hash table, if current bb doesn't have
2168 exactly one predecessor, or if that predecessor (skipping
2169 over asan created basic blocks) isn't the last processed
2170 basic block. Thus we effectively flush on extended basic
2171 block boundaries. */
2172 while (single_pred_p (prev_bb
))
2174 prev_bb
= single_pred (prev_bb
);
2175 if (prev_bb
->index
< saved_last_basic_block
)
2178 if (prev_bb
!= last_bb
)
2179 empty_mem_ref_hash_table ();
2182 for (i
= gsi_start_bb (bb
); !gsi_end_p (i
);)
2184 gimple s
= gsi_stmt (i
);
2186 if (has_stmt_been_instrumented_p (s
))
2188 else if (gimple_assign_single_p (s
)
2189 && maybe_instrument_assignment (&i
))
2190 /* Nothing to do as maybe_instrument_assignment advanced
2192 else if (is_gimple_call (s
) && maybe_instrument_call (&i
))
2193 /* Nothing to do as maybe_instrument_call
2194 advanced the iterator I. */;
2197 /* No instrumentation happened.
2199 If the current instruction is a function call that
2200 might free something, let's forget about the memory
2201 references that got instrumented. Otherwise we might
2202 miss some instrumentation opportunities. */
2203 if (is_gimple_call (s
) && !nonfreeing_call_p (s
))
2204 empty_mem_ref_hash_table ();
2210 free_mem_ref_resources ();
2214 __asan_before_dynamic_init (module_name)
2216 __asan_after_dynamic_init ()
2220 asan_dynamic_init_call (bool after_p
)
2222 tree fn
= builtin_decl_implicit (after_p
2223 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2224 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT
);
2225 tree module_name_cst
= NULL_TREE
;
2228 pretty_printer module_name_pp
;
2229 pp_string (&module_name_pp
, main_input_filename
);
2231 if (shadow_ptr_types
[0] == NULL_TREE
)
2232 asan_init_shadow_ptr_types ();
2233 module_name_cst
= asan_pp_string (&module_name_pp
);
2234 module_name_cst
= fold_convert (const_ptr_type_node
,
2238 return build_call_expr (fn
, after_p
? 0 : 1, module_name_cst
);
2242 struct __asan_global
2246 uptr __size_with_redzone;
2248 const void *__module_name;
2249 uptr __has_dynamic_init;
2253 asan_global_struct (void)
2255 static const char *field_names
[6]
2256 = { "__beg", "__size", "__size_with_redzone",
2257 "__name", "__module_name", "__has_dynamic_init" };
2258 tree fields
[6], ret
;
2261 ret
= make_node (RECORD_TYPE
);
2262 for (i
= 0; i
< 6; i
++)
2265 = build_decl (UNKNOWN_LOCATION
, FIELD_DECL
,
2266 get_identifier (field_names
[i
]),
2267 (i
== 0 || i
== 3) ? const_ptr_type_node
2268 : pointer_sized_int_node
);
2269 DECL_CONTEXT (fields
[i
]) = ret
;
2271 DECL_CHAIN (fields
[i
- 1]) = fields
[i
];
2273 TYPE_FIELDS (ret
) = fields
[0];
2274 TYPE_NAME (ret
) = get_identifier ("__asan_global");
2279 /* Append description of a single global DECL into vector V.
2280 TYPE is __asan_global struct type as returned by asan_global_struct. */
2283 asan_add_global (tree decl
, tree type
, vec
<constructor_elt
, va_gc
> *v
)
2285 tree init
, uptr
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
)));
2286 unsigned HOST_WIDE_INT size
;
2287 tree str_cst
, module_name_cst
, refdecl
= decl
;
2288 vec
<constructor_elt
, va_gc
> *vinner
= NULL
;
2290 pretty_printer asan_pp
, module_name_pp
;
2292 if (DECL_NAME (decl
))
2293 pp_tree_identifier (&asan_pp
, DECL_NAME (decl
));
2295 pp_string (&asan_pp
, "<unknown>");
2296 str_cst
= asan_pp_string (&asan_pp
);
2298 pp_string (&module_name_pp
, main_input_filename
);
2299 module_name_cst
= asan_pp_string (&module_name_pp
);
2301 if (asan_needs_local_alias (decl
))
2304 ASM_GENERATE_INTERNAL_LABEL (buf
, "LASAN", vec_safe_length (v
) + 1);
2305 refdecl
= build_decl (DECL_SOURCE_LOCATION (decl
),
2306 VAR_DECL
, get_identifier (buf
), TREE_TYPE (decl
));
2307 TREE_ADDRESSABLE (refdecl
) = TREE_ADDRESSABLE (decl
);
2308 TREE_READONLY (refdecl
) = TREE_READONLY (decl
);
2309 TREE_THIS_VOLATILE (refdecl
) = TREE_THIS_VOLATILE (decl
);
2310 DECL_GIMPLE_REG_P (refdecl
) = DECL_GIMPLE_REG_P (decl
);
2311 DECL_ARTIFICIAL (refdecl
) = DECL_ARTIFICIAL (decl
);
2312 DECL_IGNORED_P (refdecl
) = DECL_IGNORED_P (decl
);
2313 TREE_STATIC (refdecl
) = 1;
2314 TREE_PUBLIC (refdecl
) = 0;
2315 TREE_USED (refdecl
) = 1;
2316 assemble_alias (refdecl
, DECL_ASSEMBLER_NAME (decl
));
2319 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
,
2320 fold_convert (const_ptr_type_node
,
2321 build_fold_addr_expr (refdecl
)));
2322 size
= tree_to_uhwi (DECL_SIZE_UNIT (decl
));
2323 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
, build_int_cst (uptr
, size
));
2324 size
+= asan_red_zone_size (size
);
2325 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
, build_int_cst (uptr
, size
));
2326 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
,
2327 fold_convert (const_ptr_type_node
, str_cst
));
2328 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
,
2329 fold_convert (const_ptr_type_node
, module_name_cst
));
2330 varpool_node
*vnode
= varpool_get_node (decl
);
2331 int has_dynamic_init
= vnode
? vnode
->dynamically_initialized
: 0;
2332 CONSTRUCTOR_APPEND_ELT (vinner
, NULL_TREE
,
2333 build_int_cst (uptr
, has_dynamic_init
));
2334 init
= build_constructor (type
, vinner
);
2335 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, init
);
2338 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2340 initialize_sanitizer_builtins (void)
2344 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT
))
2347 tree BT_FN_VOID
= build_function_type_list (void_type_node
, NULL_TREE
);
2349 = build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
2350 tree BT_FN_VOID_CONST_PTR
2351 = build_function_type_list (void_type_node
, const_ptr_type_node
, NULL_TREE
);
2352 tree BT_FN_VOID_PTR_PTR
2353 = build_function_type_list (void_type_node
, ptr_type_node
,
2354 ptr_type_node
, NULL_TREE
);
2355 tree BT_FN_VOID_PTR_PTR_PTR
2356 = build_function_type_list (void_type_node
, ptr_type_node
,
2357 ptr_type_node
, ptr_type_node
, NULL_TREE
);
2358 tree BT_FN_VOID_PTR_PTRMODE
2359 = build_function_type_list (void_type_node
, ptr_type_node
,
2360 pointer_sized_int_node
, NULL_TREE
);
2362 = build_function_type_list (void_type_node
, integer_type_node
, NULL_TREE
);
2363 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT
[5];
2364 tree BT_FN_IX_CONST_VPTR_INT
[5];
2365 tree BT_FN_IX_VPTR_IX_INT
[5];
2366 tree BT_FN_VOID_VPTR_IX_INT
[5];
2368 = build_pointer_type (build_qualified_type (void_type_node
,
2369 TYPE_QUAL_VOLATILE
));
2371 = build_pointer_type (build_qualified_type (void_type_node
,
2375 = lang_hooks
.types
.type_for_size (BOOL_TYPE_SIZE
, 1);
2377 for (i
= 0; i
< 5; i
++)
2379 tree ix
= build_nonstandard_integer_type (BITS_PER_UNIT
* (1 << i
), 1);
2380 BT_FN_BOOL_VPTR_PTR_IX_INT_INT
[i
]
2381 = build_function_type_list (boolt
, vptr
, ptr_type_node
, ix
,
2382 integer_type_node
, integer_type_node
,
2384 BT_FN_IX_CONST_VPTR_INT
[i
]
2385 = build_function_type_list (ix
, cvptr
, integer_type_node
, NULL_TREE
);
2386 BT_FN_IX_VPTR_IX_INT
[i
]
2387 = build_function_type_list (ix
, vptr
, ix
, integer_type_node
,
2389 BT_FN_VOID_VPTR_IX_INT
[i
]
2390 = build_function_type_list (void_type_node
, vptr
, ix
,
2391 integer_type_node
, NULL_TREE
);
2393 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2394 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2395 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2396 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2397 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2398 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2399 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2400 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2401 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2402 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2403 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2404 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2405 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2406 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2407 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2408 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2409 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2410 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2411 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2412 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2413 #undef ATTR_NOTHROW_LEAF_LIST
2414 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2415 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2416 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2417 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2418 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2419 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2420 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2421 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2422 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2423 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2424 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2425 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2426 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2427 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2428 #undef DEF_SANITIZER_BUILTIN
2429 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2430 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2431 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2432 set_call_expr_flags (decl, ATTRS); \
2433 set_builtin_decl (ENUM, decl, true);
2435 #include "sanitizer.def"
2437 #undef DEF_SANITIZER_BUILTIN
2440 /* Called via htab_traverse. Count number of emitted
2441 STRING_CSTs in the constant hash table. */
2444 count_string_csts (void **slot
, void *data
)
2446 struct constant_descriptor_tree
*desc
2447 = (struct constant_descriptor_tree
*) *slot
;
2448 if (TREE_CODE (desc
->value
) == STRING_CST
2449 && TREE_ASM_WRITTEN (desc
->value
)
2450 && asan_protect_global (desc
->value
))
2451 ++*((unsigned HOST_WIDE_INT
*) data
);
2455 /* Helper structure to pass two parameters to
2458 struct asan_add_string_csts_data
2461 vec
<constructor_elt
, va_gc
> *v
;
2464 /* Called via htab_traverse. Call asan_add_global
2465 on emitted STRING_CSTs from the constant hash table. */
2468 add_string_csts (void **slot
, void *data
)
2470 struct constant_descriptor_tree
*desc
2471 = (struct constant_descriptor_tree
*) *slot
;
2472 if (TREE_CODE (desc
->value
) == STRING_CST
2473 && TREE_ASM_WRITTEN (desc
->value
)
2474 && asan_protect_global (desc
->value
))
2476 struct asan_add_string_csts_data
*aascd
2477 = (struct asan_add_string_csts_data
*) data
;
2478 asan_add_global (SYMBOL_REF_DECL (XEXP (desc
->rtl
, 0)),
2479 aascd
->type
, aascd
->v
);
/* Statement list accumulating the constructor-time asan calls
   (module init and global registration) emitted by asan_finish_file.
   Needs to be GTY(()), because cgraph_build_static_cdtor may
   invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;
2488 /* Module-level instrumentation.
2489 - Insert __asan_init_vN() into the list of CTORs.
2490 - TODO: insert redzones around globals.
2494 asan_finish_file (void)
2496 varpool_node
*vnode
;
2497 unsigned HOST_WIDE_INT gcount
= 0;
2499 if (shadow_ptr_types
[0] == NULL_TREE
)
2500 asan_init_shadow_ptr_types ();
2501 /* Avoid instrumenting code in the asan ctors/dtors.
2502 We don't need to insert padding after the description strings,
2503 nor after .LASAN* array. */
2504 flag_sanitize
&= ~SANITIZE_ADDRESS
;
2506 tree fn
= builtin_decl_implicit (BUILT_IN_ASAN_INIT
);
2507 append_to_statement_list (build_call_expr (fn
, 0), &asan_ctor_statements
);
2508 FOR_EACH_DEFINED_VARIABLE (vnode
)
2509 if (TREE_ASM_WRITTEN (vnode
->decl
)
2510 && asan_protect_global (vnode
->decl
))
2512 htab_t const_desc_htab
= constant_pool_htab ();
2513 htab_traverse (const_desc_htab
, count_string_csts
, &gcount
);
2516 tree type
= asan_global_struct (), var
, ctor
;
2517 tree dtor_statements
= NULL_TREE
;
2518 vec
<constructor_elt
, va_gc
> *v
;
2521 type
= build_array_type_nelts (type
, gcount
);
2522 ASM_GENERATE_INTERNAL_LABEL (buf
, "LASAN", 0);
2523 var
= build_decl (UNKNOWN_LOCATION
, VAR_DECL
, get_identifier (buf
),
2525 TREE_STATIC (var
) = 1;
2526 TREE_PUBLIC (var
) = 0;
2527 DECL_ARTIFICIAL (var
) = 1;
2528 DECL_IGNORED_P (var
) = 1;
2529 vec_alloc (v
, gcount
);
2530 FOR_EACH_DEFINED_VARIABLE (vnode
)
2531 if (TREE_ASM_WRITTEN (vnode
->decl
)
2532 && asan_protect_global (vnode
->decl
))
2533 asan_add_global (vnode
->decl
, TREE_TYPE (type
), v
);
2534 struct asan_add_string_csts_data aascd
;
2535 aascd
.type
= TREE_TYPE (type
);
2537 htab_traverse (const_desc_htab
, add_string_csts
, &aascd
);
2538 ctor
= build_constructor (type
, v
);
2539 TREE_CONSTANT (ctor
) = 1;
2540 TREE_STATIC (ctor
) = 1;
2541 DECL_INITIAL (var
) = ctor
;
2542 varpool_assemble_decl (varpool_node_for_decl (var
));
2544 fn
= builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS
);
2545 tree gcount_tree
= build_int_cst (pointer_sized_int_node
, gcount
);
2546 append_to_statement_list (build_call_expr (fn
, 2,
2547 build_fold_addr_expr (var
),
2549 &asan_ctor_statements
);
2551 fn
= builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS
);
2552 append_to_statement_list (build_call_expr (fn
, 2,
2553 build_fold_addr_expr (var
),
2556 cgraph_build_static_cdtor ('D', dtor_statements
,
2557 MAX_RESERVED_INIT_PRIORITY
- 1);
2559 cgraph_build_static_cdtor ('I', asan_ctor_statements
,
2560 MAX_RESERVED_INIT_PRIORITY
- 1);
2561 flag_sanitize
|= SANITIZE_ADDRESS
;
2564 /* Instrument the current function. */
2567 asan_instrument (void)
2569 if (shadow_ptr_types
[0] == NULL_TREE
)
2570 asan_init_shadow_ptr_types ();
2571 transform_statements ();
2578 return (flag_sanitize
& SANITIZE_ADDRESS
) != 0
2579 && !lookup_attribute ("no_sanitize_address",
2580 DECL_ATTRIBUTES (current_function_decl
));
2585 const pass_data pass_data_asan
=
2587 GIMPLE_PASS
, /* type */
2589 OPTGROUP_NONE
, /* optinfo_flags */
2590 true, /* has_execute */
2591 TV_NONE
, /* tv_id */
2592 ( PROP_ssa
| PROP_cfg
| PROP_gimple_leh
), /* properties_required */
2593 0, /* properties_provided */
2594 0, /* properties_destroyed */
2595 0, /* todo_flags_start */
2596 TODO_update_ssa
, /* todo_flags_finish */
2599 class pass_asan
: public gimple_opt_pass
2602 pass_asan (gcc::context
*ctxt
)
2603 : gimple_opt_pass (pass_data_asan
, ctxt
)
2606 /* opt_pass methods: */
2607 opt_pass
* clone () { return new pass_asan (m_ctxt
); }
2608 virtual bool gate (function
*) { return gate_asan (); }
2609 virtual unsigned int execute (function
*) { return asan_instrument (); }
2611 }; // class pass_asan
2616 make_pass_asan (gcc::context
*ctxt
)
2618 return new pass_asan (ctxt
);
2623 const pass_data pass_data_asan_O0
=
2625 GIMPLE_PASS
, /* type */
2627 OPTGROUP_NONE
, /* optinfo_flags */
2628 true, /* has_execute */
2629 TV_NONE
, /* tv_id */
2630 ( PROP_ssa
| PROP_cfg
| PROP_gimple_leh
), /* properties_required */
2631 0, /* properties_provided */
2632 0, /* properties_destroyed */
2633 0, /* todo_flags_start */
2634 TODO_update_ssa
, /* todo_flags_finish */
2637 class pass_asan_O0
: public gimple_opt_pass
2640 pass_asan_O0 (gcc::context
*ctxt
)
2641 : gimple_opt_pass (pass_data_asan_O0
, ctxt
)
2644 /* opt_pass methods: */
2645 virtual bool gate (function
*) { return !optimize
&& gate_asan (); }
2646 virtual unsigned int execute (function
*) { return asan_instrument (); }
2648 }; // class pass_asan_O0
2653 make_pass_asan_O0 (gcc::context
*ctxt
)
2655 return new pass_asan_O0 (ctxt
);
2658 /* Perform optimization of sanitize functions. */
2662 const pass_data pass_data_sanopt
=
2664 GIMPLE_PASS
, /* type */
2665 "sanopt", /* name */
2666 OPTGROUP_NONE
, /* optinfo_flags */
2667 true, /* has_execute */
2668 TV_NONE
, /* tv_id */
2669 ( PROP_ssa
| PROP_cfg
| PROP_gimple_leh
), /* properties_required */
2670 0, /* properties_provided */
2671 0, /* properties_destroyed */
2672 0, /* todo_flags_start */
2673 TODO_update_ssa
, /* todo_flags_finish */
2676 class pass_sanopt
: public gimple_opt_pass
2679 pass_sanopt (gcc::context
*ctxt
)
2680 : gimple_opt_pass (pass_data_sanopt
, ctxt
)
2683 /* opt_pass methods: */
2684 virtual bool gate (function
*) { return flag_sanitize
; }
2685 virtual unsigned int execute (function
*);
2687 }; // class pass_sanopt
2690 pass_sanopt::execute (function
*fun
)
2694 FOR_EACH_BB_FN (bb
, fun
)
2696 gimple_stmt_iterator gsi
;
2697 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2699 gimple stmt
= gsi_stmt (gsi
);
2701 if (!is_gimple_call (stmt
))
2704 if (gimple_call_internal_p (stmt
))
2705 switch (gimple_call_internal_fn (stmt
))
2707 case IFN_UBSAN_NULL
:
2708 ubsan_expand_null_ifn (gsi
);
2714 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2716 fprintf (dump_file
, "Optimized\n ");
2717 print_gimple_stmt (dump_file
, stmt
, 0, dump_flags
);
2718 fprintf (dump_file
, "\n");
2728 make_pass_sanopt (gcc::context
*ctxt
)
2730 return new pass_sanopt (ctxt
);
2733 #include "gt-asan.h"