1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2013 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "gimplify.h"
28 #include "gimple-iterator.h"
29 #include "calls.h"
30 #include "varasm.h"
31 #include "stor-layout.h"
32 #include "tree-iterator.h"
33 #include "cgraph.h"
34 #include "stringpool.h"
35 #include "tree-ssanames.h"
36 #include "tree-pass.h"
37 #include "asan.h"
38 #include "gimple-pretty-print.h"
39 #include "target.h"
40 #include "expr.h"
41 #include "optabs.h"
42 #include "output.h"
43 #include "tm_p.h"
44 #include "langhooks.h"
45 #include "hash-table.h"
46 #include "alloc-pool.h"
47 #include "cfgloop.h"
48 #include "gimple-builder.h"
49 #include "ubsan.h"
50
51 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
52 with <2x slowdown on average.
53
54 The tool consists of two parts:
55 instrumentation module (this file) and a run-time library.
56 The instrumentation module adds a run-time check before every memory insn.
57 For an 8- or 16-byte load accessing address X:
58 ShadowAddr = (X >> 3) + Offset
59 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
60 if (ShadowValue)
61 __asan_report_load8(X);
62 For a load of N bytes (N=1, 2 or 4) from address X:
63 ShadowAddr = (X >> 3) + Offset
64 ShadowValue = *(char*)ShadowAddr;
65 if (ShadowValue)
66 if ((X & 7) + N - 1 >= ShadowValue)
67 __asan_report_loadN(X);
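   As a worked example (with ASAN_SHADOW_SHIFT == 3 and an illustrative
   Offset of 0x7fff8000), a 4-byte load from X = 0x1001 gives
     ShadowAddr = (0x1001 >> 3) + 0x7fff8000 = 0x7fff8200
   and the access is reported iff the shadow byte there is non-zero and
   (0x1001 & 7) + 4 - 1 = 4 >= ShadowValue (a signed comparison), i.e.
   iff fewer than 5 leading bytes of the 8-byte granule containing X
   are addressable.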
68 Stores are instrumented similarly, but using __asan_report_storeN functions.
69 A call to __asan_init_vN() is inserted into the list of module CTORs.
70 N is the version number of the AddressSanitizer API. The changes between the
71 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
72
73 The run-time library redefines malloc (so that red zones are inserted around
74 the allocated memory) and free (so that reuse of freed memory is delayed),
75 provides __asan_report* and __asan_init_vN functions.
76
77 Read more:
78 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
79
80 The current implementation supports detection of out-of-bounds and
81 use-after-free in the heap, on the stack and for global variables.
82
83 [Protection of stack variables]
84
85 To understand how detection of out-of-bounds and use-after-free works
86 for stack variables, let's look at this example on x86_64 where the
87 stack grows downward:
88
89 int
90 foo ()
91 {
92 char a[23] = {0};
93 int b[2] = {0};
94
95 a[5] = 1;
96 b[1] = 2;
97
98 return a[5] + b[1];
99 }
100
101 For this function, the stack protected by asan will be organized as
102 follows, from the top of the stack to the bottom:
103
104 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
105
106 Slot 2/ [8 bytes of red zone that pad the space of 'a' to make
107 the next slot 32-byte aligned; this one is called a Partial
108 Redzone; this 32-byte alignment is an asan constraint]
109
110 Slot 3/ [24 bytes for variable 'a']
111
112 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
113
114 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
115
116 Slot 6/ [8 bytes for variable 'b']
117
118 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
119 'LEFT RedZone']
120
121 The 32 bytes of LEFT red zone at the bottom of the stack can be
122 decomposed as follows:
123
124 1/ The first 8 bytes contain a magical asan number that is always
125 0x41B58AB3.
126
127 2/ The following 8 bytes contain a pointer to a string (to be
128 parsed at run time by the asan run-time library), whose format
129 is the following:
130
131 "<function-name> <space> <num-of-variables-on-the-stack>
132 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
133 <length-of-var-in-bytes> ){n} "
134
135 where '(...){n}' means the content inside the parentheses occurs 'n'
136 times, with 'n' being the number of variables on the stack.
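   For instance, for the function foo above, such a string could look
   like this (the offsets are purely illustrative):

     "foo 2 32 24 96 8 "

   i.e. two variables, 'a' of 24 bytes at offset 32 and 'b' of 8 bytes
   at offset 96 from the bottom of the protected stack area.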
137
138 3/ The following 8 bytes contain the PC of the current function which
139 will be used by the run-time library to print an error message.
140
141 4/ The following 8 bytes are reserved for internal use by the run-time.
142
143 The shadow memory for that stack layout is going to look like this:
144
145 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
146 The F1 byte pattern is a magic number called
147 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
148 the memory for that shadow byte is part of the LEFT red zone
149 intended to sit at the bottom of the variables on the stack.
150
151 - content of shadow memory 8 bytes for slots 6 and 5:
152 0xF4F4F400. The F4 byte pattern is a magic number
153 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
154 memory region for this shadow byte is a PARTIAL red zone
155 intended to pad a variable A, so that the slot following
156 {A,padding} is 32 bytes aligned.
157
158 Note that the least significant byte of this shadow memory
159 content being 00 means that the 8 bytes of application
160 memory it covers (which correspond to the memory of
161 variable 'b') are addressable.
162
163 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
164 The F2 byte pattern is a magic number called
165 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
166 region for this shadow byte is a MIDDLE red zone intended to
167 sit between two 32-byte aligned slots of {variable,padding}.
168
169 - content of shadow memory 8 bytes for slots 3 and 2:
170 0xF4000000. This represents the concatenation of
171 variable 'a' and the partial red zone following it, like what we
172 had for variable 'b'. The least significant 3 bytes being 00
173 means that the 24 bytes of the slot holding variable 'a' are addressable.
174
175 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
176 The F3 byte pattern is a magic number called
177 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
178 region for this shadow byte is a RIGHT red zone intended to sit
179 at the top of the variables on the stack.
180
181 Note that the real variable layout is done in expand_used_vars in
182 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
183 stack variables as well as the different red zones, emits some
184 prologue code to populate the shadow memory so as to poison (mark as
185 non-accessible) the regions of the red zones and mark the regions of
186 stack variables as accessible, and emits some epilogue code to
187 un-poison (mark as accessible) the regions of red zones right before
188 the function exits.
189
190 [Protection of global variables]
191
192 The basic idea is to insert a red zone between two global variables
193 and install a constructor function that calls the asan runtime to
194 populate the relevant shadow memory regions at load time.
195
196 So the global variables are laid out so as to insert a red zone
197 between them. The size of the red zones is chosen so that each
198 variable starts on a 32-byte boundary.
199
200 Then a constructor function is installed so that, for each global
201 variable, it calls the runtime asan library function
202 __asan_register_globals with an instance of this type:
203
204 struct __asan_global
205 {
206 // Address of the beginning of the global variable.
207 const void *__beg;
208
209 // Initial size of the global variable.
210 uptr __size;
211
212 // Size of the global variable + size of the red zone. This
213 // size is 32-byte aligned.
214 uptr __size_with_redzone;
215
216 // Name of the global variable.
217 const void *__name;
218
219 // Name of the module where the global variable is declared.
220 const void *__module_name;
221
222 // This is always set to NULL for now.
223 uptr __has_dynamic_init;
224 }
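   For example, for a global 'int g;' defined in module 'm.c', the
   instance could look like this (the sizes are illustrative):

     { &g, 4, 32, "g", "m.c", 0 }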
225
226 A destructor function that calls the runtime asan library function
227 __asan_unregister_globals is also installed. */
228
229 alias_set_type asan_shadow_set = -1;
230
231 /* Pointer types to 1-byte resp. 2-byte integers in shadow memory. A separate
232 alias set is used for all shadow memory accesses. */
233 static GTY(()) tree shadow_ptr_types[2];
234
235 /* Hashtable support for memory references used by gimple
236 statements. */
237
238 /* This type represents a reference to a memory region. */
239 struct asan_mem_ref
240 {
241 /* The expression of the beginning of the memory region. */
242 tree start;
243
244 /* The size of the access (can be 1, 2, 4, 8, 16 for now). */
245 char access_size;
246 };
247
248 static alloc_pool asan_mem_ref_alloc_pool;
249
250 /* This creates the alloc pool used to store the instances of
251 asan_mem_ref that are stored in the hash table asan_mem_ref_ht. */
252
253 static alloc_pool
254 asan_mem_ref_get_alloc_pool ()
255 {
256 if (asan_mem_ref_alloc_pool == NULL)
257 asan_mem_ref_alloc_pool = create_alloc_pool ("asan_mem_ref",
258 sizeof (asan_mem_ref),
259 10);
260 return asan_mem_ref_alloc_pool;
261
262 }
263
264 /* Initializes an instance of asan_mem_ref. */
265
266 static void
267 asan_mem_ref_init (asan_mem_ref *ref, tree start, char access_size)
268 {
269 ref->start = start;
270 ref->access_size = access_size;
271 }
272
273 /* Allocates memory for an instance of asan_mem_ref into the memory
274 pool returned by asan_mem_ref_get_alloc_pool and initializes it.
275 START is the address of (or the expression pointing to) the
276 beginning of memory reference. ACCESS_SIZE is the size of the
277 access to the referenced memory. */
278
279 static asan_mem_ref*
280 asan_mem_ref_new (tree start, char access_size)
281 {
282 asan_mem_ref *ref =
283 (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());
284
285 asan_mem_ref_init (ref, start, access_size);
286 return ref;
287 }
288
289 /* This builds and returns a pointer to the end of the memory region
290 that starts at START and has length LEN. */
291
292 tree
293 asan_mem_ref_get_end (tree start, tree len)
294 {
295 if (len == NULL_TREE || integer_zerop (len))
296 return start;
297
298 return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
299 }
300
301 /* Return a tree expression that represents the end of the referenced
302 memory region. Beware that this function can actually build a new
303 tree expression. */
304
305 tree
306 asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
307 {
308 return asan_mem_ref_get_end (ref->start, len);
309 }
310
311 struct asan_mem_ref_hasher
312 : typed_noop_remove <asan_mem_ref>
313 {
314 typedef asan_mem_ref value_type;
315 typedef asan_mem_ref compare_type;
316
317 static inline hashval_t hash (const value_type *);
318 static inline bool equal (const value_type *, const compare_type *);
319 };
320
321 /* Hash a memory reference. */
322
323 inline hashval_t
324 asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
325 {
326 hashval_t h = iterative_hash_expr (mem_ref->start, 0);
327 h = iterative_hash_hashval_t (h, mem_ref->access_size);
328 return h;
329 }
330
331 /* Compare two memory references. Two references are considered
332 equal when their start expressions and access sizes match. */
333
334 inline bool
335 asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
336 const asan_mem_ref *m2)
337 {
338 return (m1->access_size == m2->access_size
339 && operand_equal_p (m1->start, m2->start, 0));
340 }
341
342 static hash_table <asan_mem_ref_hasher> asan_mem_ref_ht;
343
344 /* Returns a reference to the hash table containing memory references.
345 This function ensures that the hash table is created. Note that
346 this hash table is updated by the function
347 update_mem_ref_hash_table. */
348
349 static hash_table <asan_mem_ref_hasher> &
350 get_mem_ref_hash_table ()
351 {
352 if (!asan_mem_ref_ht.is_created ())
353 asan_mem_ref_ht.create (10);
354
355 return asan_mem_ref_ht;
356 }
357
358 /* Clear all entries from the memory references hash table. */
359
360 static void
361 empty_mem_ref_hash_table ()
362 {
363 if (asan_mem_ref_ht.is_created ())
364 asan_mem_ref_ht.empty ();
365 }
366
367 /* Free the memory references hash table. */
368
369 static void
370 free_mem_ref_resources ()
371 {
372 if (asan_mem_ref_ht.is_created ())
373 asan_mem_ref_ht.dispose ();
374
375 if (asan_mem_ref_alloc_pool)
376 {
377 free_alloc_pool (asan_mem_ref_alloc_pool);
378 asan_mem_ref_alloc_pool = NULL;
379 }
380 }
381
382 /* Return true iff the memory reference REF has been instrumented. */
383
384 static bool
385 has_mem_ref_been_instrumented (tree ref, char access_size)
386 {
387 asan_mem_ref r;
388 asan_mem_ref_init (&r, ref, access_size);
389
390 return (get_mem_ref_hash_table ().find (&r) != NULL);
391 }
392
393 /* Return true iff the memory reference REF has been instrumented. */
394
395 static bool
396 has_mem_ref_been_instrumented (const asan_mem_ref *ref)
397 {
398 return has_mem_ref_been_instrumented (ref->start, ref->access_size);
399 }
400
401 /* Return true iff access to memory region starting at REF and of
402 length LEN has been instrumented. */
403
404 static bool
405 has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
406 {
407 /* First let's see if the address of the beginning of REF has been
408 instrumented. */
409 if (!has_mem_ref_been_instrumented (ref))
410 return false;
411
412 if (len != 0)
413 {
414 /* Let's see if the end of the region has been instrumented. */
415 if (!has_mem_ref_been_instrumented (asan_mem_ref_get_end (ref, len),
416 ref->access_size))
417 return false;
418 }
419 return true;
420 }
421
422 /* Set REF to the memory reference present in a gimple assignment
423 ASSIGNMENT. Return true upon successful completion, false
424 otherwise. */
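/* For instance, for the gimple statement 'x = *p_1;', REF->start is set
   to the MEM_REF '*p_1', REF->access_size to the size in bytes of the
   type of '*p_1', and *REF_IS_STORE to false.  (Illustrative example.)  */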
425
426 static bool
427 get_mem_ref_of_assignment (const gimple assignment,
428 asan_mem_ref *ref,
429 bool *ref_is_store)
430 {
431 gcc_assert (gimple_assign_single_p (assignment));
432
433 if (gimple_store_p (assignment)
434 && !gimple_clobber_p (assignment))
435 {
436 ref->start = gimple_assign_lhs (assignment);
437 *ref_is_store = true;
438 }
439 else if (gimple_assign_load_p (assignment))
440 {
441 ref->start = gimple_assign_rhs1 (assignment);
442 *ref_is_store = false;
443 }
444 else
445 return false;
446
447 ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
448 return true;
449 }
450
451 /* Collect the memory references contained in a gimple statement
452 representing a builtin call that has to do with memory access. */
453
454 static bool
455 get_mem_refs_of_builtin_call (const gimple call,
456 asan_mem_ref *src0,
457 tree *src0_len,
458 bool *src0_is_store,
459 asan_mem_ref *src1,
460 tree *src1_len,
461 bool *src1_is_store,
462 asan_mem_ref *dst,
463 tree *dst_len,
464 bool *dst_is_store,
465 bool *dest_is_deref)
466 {
467 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
468
469 tree callee = gimple_call_fndecl (call);
470 tree source0 = NULL_TREE, source1 = NULL_TREE,
471 dest = NULL_TREE, len = NULL_TREE;
472 bool is_store = true, got_reference_p = false;
473 char access_size = 1;
474
475 switch (DECL_FUNCTION_CODE (callee))
476 {
477 /* (s, s, n) style memops. */
478 case BUILT_IN_BCMP:
479 case BUILT_IN_MEMCMP:
480 source0 = gimple_call_arg (call, 0);
481 source1 = gimple_call_arg (call, 1);
482 len = gimple_call_arg (call, 2);
483 break;
484
485 /* (src, dest, n) style memops. */
486 case BUILT_IN_BCOPY:
487 source0 = gimple_call_arg (call, 0);
488 dest = gimple_call_arg (call, 1);
489 len = gimple_call_arg (call, 2);
490 break;
491
492 /* (dest, src, n) style memops. */
493 case BUILT_IN_MEMCPY:
494 case BUILT_IN_MEMCPY_CHK:
495 case BUILT_IN_MEMMOVE:
496 case BUILT_IN_MEMMOVE_CHK:
497 case BUILT_IN_MEMPCPY:
498 case BUILT_IN_MEMPCPY_CHK:
499 dest = gimple_call_arg (call, 0);
500 source0 = gimple_call_arg (call, 1);
501 len = gimple_call_arg (call, 2);
502 break;
503
504 /* (dest, n) style memops. */
505 case BUILT_IN_BZERO:
506 dest = gimple_call_arg (call, 0);
507 len = gimple_call_arg (call, 1);
508 break;
509
510 /* (dest, x, n) style memops. */
511 case BUILT_IN_MEMSET:
512 case BUILT_IN_MEMSET_CHK:
513 dest = gimple_call_arg (call, 0);
514 len = gimple_call_arg (call, 2);
515 break;
516
517 case BUILT_IN_STRLEN:
518 source0 = gimple_call_arg (call, 0);
519 len = gimple_call_lhs (call);
520 break;
521
522 /* And now the __atomic* and __sync builtins.
523 These are handled differently from the classical memory
524 access builtins above. */
525
526 case BUILT_IN_ATOMIC_LOAD_1:
527 case BUILT_IN_ATOMIC_LOAD_2:
528 case BUILT_IN_ATOMIC_LOAD_4:
529 case BUILT_IN_ATOMIC_LOAD_8:
530 case BUILT_IN_ATOMIC_LOAD_16:
531 is_store = false;
532 /* fall through. */
533
534 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
535 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
536 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
537 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
538 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
539
540 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
541 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
542 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
543 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
544 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
545
546 case BUILT_IN_SYNC_FETCH_AND_OR_1:
547 case BUILT_IN_SYNC_FETCH_AND_OR_2:
548 case BUILT_IN_SYNC_FETCH_AND_OR_4:
549 case BUILT_IN_SYNC_FETCH_AND_OR_8:
550 case BUILT_IN_SYNC_FETCH_AND_OR_16:
551
552 case BUILT_IN_SYNC_FETCH_AND_AND_1:
553 case BUILT_IN_SYNC_FETCH_AND_AND_2:
554 case BUILT_IN_SYNC_FETCH_AND_AND_4:
555 case BUILT_IN_SYNC_FETCH_AND_AND_8:
556 case BUILT_IN_SYNC_FETCH_AND_AND_16:
557
558 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
559 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
560 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
561 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
562 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
563
564 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
565 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
566 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
567 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
568
569 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
570 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
571 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
572 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
573 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
574
575 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
576 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
577 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
578 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
579 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
580
581 case BUILT_IN_SYNC_OR_AND_FETCH_1:
582 case BUILT_IN_SYNC_OR_AND_FETCH_2:
583 case BUILT_IN_SYNC_OR_AND_FETCH_4:
584 case BUILT_IN_SYNC_OR_AND_FETCH_8:
585 case BUILT_IN_SYNC_OR_AND_FETCH_16:
586
587 case BUILT_IN_SYNC_AND_AND_FETCH_1:
588 case BUILT_IN_SYNC_AND_AND_FETCH_2:
589 case BUILT_IN_SYNC_AND_AND_FETCH_4:
590 case BUILT_IN_SYNC_AND_AND_FETCH_8:
591 case BUILT_IN_SYNC_AND_AND_FETCH_16:
592
593 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
594 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
595 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
596 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
597 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
598
599 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
600 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
601 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
602 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
603
604 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
605 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
606 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
607 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
608 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
609
610 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
611 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
612 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
613 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
614 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
615
616 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
617 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
618 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
619 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
620 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
621
622 case BUILT_IN_SYNC_LOCK_RELEASE_1:
623 case BUILT_IN_SYNC_LOCK_RELEASE_2:
624 case BUILT_IN_SYNC_LOCK_RELEASE_4:
625 case BUILT_IN_SYNC_LOCK_RELEASE_8:
626 case BUILT_IN_SYNC_LOCK_RELEASE_16:
627
628 case BUILT_IN_ATOMIC_EXCHANGE_1:
629 case BUILT_IN_ATOMIC_EXCHANGE_2:
630 case BUILT_IN_ATOMIC_EXCHANGE_4:
631 case BUILT_IN_ATOMIC_EXCHANGE_8:
632 case BUILT_IN_ATOMIC_EXCHANGE_16:
633
634 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
635 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
636 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
637 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
638 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
639
640 case BUILT_IN_ATOMIC_STORE_1:
641 case BUILT_IN_ATOMIC_STORE_2:
642 case BUILT_IN_ATOMIC_STORE_4:
643 case BUILT_IN_ATOMIC_STORE_8:
644 case BUILT_IN_ATOMIC_STORE_16:
645
646 case BUILT_IN_ATOMIC_ADD_FETCH_1:
647 case BUILT_IN_ATOMIC_ADD_FETCH_2:
648 case BUILT_IN_ATOMIC_ADD_FETCH_4:
649 case BUILT_IN_ATOMIC_ADD_FETCH_8:
650 case BUILT_IN_ATOMIC_ADD_FETCH_16:
651
652 case BUILT_IN_ATOMIC_SUB_FETCH_1:
653 case BUILT_IN_ATOMIC_SUB_FETCH_2:
654 case BUILT_IN_ATOMIC_SUB_FETCH_4:
655 case BUILT_IN_ATOMIC_SUB_FETCH_8:
656 case BUILT_IN_ATOMIC_SUB_FETCH_16:
657
658 case BUILT_IN_ATOMIC_AND_FETCH_1:
659 case BUILT_IN_ATOMIC_AND_FETCH_2:
660 case BUILT_IN_ATOMIC_AND_FETCH_4:
661 case BUILT_IN_ATOMIC_AND_FETCH_8:
662 case BUILT_IN_ATOMIC_AND_FETCH_16:
663
664 case BUILT_IN_ATOMIC_NAND_FETCH_1:
665 case BUILT_IN_ATOMIC_NAND_FETCH_2:
666 case BUILT_IN_ATOMIC_NAND_FETCH_4:
667 case BUILT_IN_ATOMIC_NAND_FETCH_8:
668 case BUILT_IN_ATOMIC_NAND_FETCH_16:
669
670 case BUILT_IN_ATOMIC_XOR_FETCH_1:
671 case BUILT_IN_ATOMIC_XOR_FETCH_2:
672 case BUILT_IN_ATOMIC_XOR_FETCH_4:
673 case BUILT_IN_ATOMIC_XOR_FETCH_8:
674 case BUILT_IN_ATOMIC_XOR_FETCH_16:
675
676 case BUILT_IN_ATOMIC_OR_FETCH_1:
677 case BUILT_IN_ATOMIC_OR_FETCH_2:
678 case BUILT_IN_ATOMIC_OR_FETCH_4:
679 case BUILT_IN_ATOMIC_OR_FETCH_8:
680 case BUILT_IN_ATOMIC_OR_FETCH_16:
681
682 case BUILT_IN_ATOMIC_FETCH_ADD_1:
683 case BUILT_IN_ATOMIC_FETCH_ADD_2:
684 case BUILT_IN_ATOMIC_FETCH_ADD_4:
685 case BUILT_IN_ATOMIC_FETCH_ADD_8:
686 case BUILT_IN_ATOMIC_FETCH_ADD_16:
687
688 case BUILT_IN_ATOMIC_FETCH_SUB_1:
689 case BUILT_IN_ATOMIC_FETCH_SUB_2:
690 case BUILT_IN_ATOMIC_FETCH_SUB_4:
691 case BUILT_IN_ATOMIC_FETCH_SUB_8:
692 case BUILT_IN_ATOMIC_FETCH_SUB_16:
693
694 case BUILT_IN_ATOMIC_FETCH_AND_1:
695 case BUILT_IN_ATOMIC_FETCH_AND_2:
696 case BUILT_IN_ATOMIC_FETCH_AND_4:
697 case BUILT_IN_ATOMIC_FETCH_AND_8:
698 case BUILT_IN_ATOMIC_FETCH_AND_16:
699
700 case BUILT_IN_ATOMIC_FETCH_NAND_1:
701 case BUILT_IN_ATOMIC_FETCH_NAND_2:
702 case BUILT_IN_ATOMIC_FETCH_NAND_4:
703 case BUILT_IN_ATOMIC_FETCH_NAND_8:
704 case BUILT_IN_ATOMIC_FETCH_NAND_16:
705
706 case BUILT_IN_ATOMIC_FETCH_XOR_1:
707 case BUILT_IN_ATOMIC_FETCH_XOR_2:
708 case BUILT_IN_ATOMIC_FETCH_XOR_4:
709 case BUILT_IN_ATOMIC_FETCH_XOR_8:
710 case BUILT_IN_ATOMIC_FETCH_XOR_16:
711
712 case BUILT_IN_ATOMIC_FETCH_OR_1:
713 case BUILT_IN_ATOMIC_FETCH_OR_2:
714 case BUILT_IN_ATOMIC_FETCH_OR_4:
715 case BUILT_IN_ATOMIC_FETCH_OR_8:
716 case BUILT_IN_ATOMIC_FETCH_OR_16:
717 {
718 dest = gimple_call_arg (call, 0);
719 /* DEST represents the address of a memory location.
720 instrument_derefs wants the memory location, so let's
721 dereference the address DEST before handing it to
722 instrument_derefs. */
723 if (TREE_CODE (dest) == ADDR_EXPR)
724 dest = TREE_OPERAND (dest, 0);
725 else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
726 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
727 dest, build_int_cst (TREE_TYPE (dest), 0));
728 else
729 gcc_unreachable ();
730
731 access_size = int_size_in_bytes (TREE_TYPE (dest));
732 }
733
734 default:
735 /* The other memory access builtins are not instrumented in this
736 function because they either don't have any length parameter,
737 or their length parameter is just a limit. */
738 break;
739 }
740
741 if (len != NULL_TREE)
742 {
743 if (source0 != NULL_TREE)
744 {
745 src0->start = source0;
746 src0->access_size = access_size;
747 *src0_len = len;
748 *src0_is_store = false;
749 }
750
751 if (source1 != NULL_TREE)
752 {
753 src1->start = source1;
754 src1->access_size = access_size;
755 *src1_len = len;
756 *src1_is_store = false;
757 }
758
759 if (dest != NULL_TREE)
760 {
761 dst->start = dest;
762 dst->access_size = access_size;
763 *dst_len = len;
764 *dst_is_store = true;
765 }
766
767 got_reference_p = true;
768 }
769 else if (dest)
770 {
771 dst->start = dest;
772 dst->access_size = access_size;
773 *dst_len = NULL_TREE;
774 *dst_is_store = is_store;
775 *dest_is_deref = true;
776 got_reference_p = true;
777 }
778
779 return got_reference_p;
780 }
781
782 /* Return true iff a given gimple statement has been instrumented.
783 Note that the statement is "defined" by the memory references it
784 contains. */
785
786 static bool
787 has_stmt_been_instrumented_p (gimple stmt)
788 {
789 if (gimple_assign_single_p (stmt))
790 {
791 bool r_is_store;
792 asan_mem_ref r;
793 asan_mem_ref_init (&r, NULL, 1);
794
795 if (get_mem_ref_of_assignment (stmt, &r, &r_is_store))
796 return has_mem_ref_been_instrumented (&r);
797 }
798 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
799 {
800 asan_mem_ref src0, src1, dest;
801 asan_mem_ref_init (&src0, NULL, 1);
802 asan_mem_ref_init (&src1, NULL, 1);
803 asan_mem_ref_init (&dest, NULL, 1);
804
805 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
806 bool src0_is_store = false, src1_is_store = false,
807 dest_is_store = false, dest_is_deref = false;
808 if (get_mem_refs_of_builtin_call (stmt,
809 &src0, &src0_len, &src0_is_store,
810 &src1, &src1_len, &src1_is_store,
811 &dest, &dest_len, &dest_is_store,
812 &dest_is_deref))
813 {
814 if (src0.start != NULL_TREE
815 && !has_mem_ref_been_instrumented (&src0, src0_len))
816 return false;
817
818 if (src1.start != NULL_TREE
819 && !has_mem_ref_been_instrumented (&src1, src1_len))
820 return false;
821
822 if (dest.start != NULL_TREE
823 && !has_mem_ref_been_instrumented (&dest, dest_len))
824 return false;
825
826 return true;
827 }
828 }
829 return false;
830 }
831
832 /* Insert a memory reference into the hash table. */
833
834 static void
835 update_mem_ref_hash_table (tree ref, char access_size)
836 {
837 hash_table <asan_mem_ref_hasher> ht = get_mem_ref_hash_table ();
838
839 asan_mem_ref r;
840 asan_mem_ref_init (&r, ref, access_size);
841
842 asan_mem_ref **slot = ht.find_slot (&r, INSERT);
843 if (*slot == NULL)
844 *slot = asan_mem_ref_new (ref, access_size);
845 }
846
847 /* Initialize shadow_ptr_types array. */
848
849 static void
850 asan_init_shadow_ptr_types (void)
851 {
852 asan_shadow_set = new_alias_set ();
853 shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
854 TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
855 shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
856 shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
857 TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
858 shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
859 initialize_sanitizer_builtins ();
860 }
861
862 /* Create an ADDR_EXPR of a STRING_CST holding the formatted text of the PP pretty printer. */
863
864 static tree
865 asan_pp_string (pretty_printer *pp)
866 {
867 const char *buf = pp_formatted_text (pp);
868 size_t len = strlen (buf);
869 tree ret = build_string (len + 1, buf);
870 TREE_TYPE (ret)
871 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
872 build_index_type (size_int (len)));
873 TREE_READONLY (ret) = 1;
874 TREE_STATIC (ret) = 1;
875 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
876 }
877
878 /* Return a CONST_INT representing 4 subsequent shadow memory bytes. */
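/* For instance, on a little-endian target, SHADOW_BYTES
   { 0x00, 0x00, 0xF4, 0xF3 } yields the SImode constant 0xF3F40000.  */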
879
880 static rtx
881 asan_shadow_cst (unsigned char shadow_bytes[4])
882 {
883 int i;
884 unsigned HOST_WIDE_INT val = 0;
885 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
886 for (i = 0; i < 4; i++)
887 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
888 << (BITS_PER_UNIT * i);
889 return gen_int_mode (val, SImode);
890 }
891
892 /* Clear shadow memory at SHADOW_MEM, LEN bytes. We can't emit a library
893 call here though. */
894
895 static void
896 asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
897 {
898 rtx insn, insns, top_label, end, addr, tmp, jump;
899
900 start_sequence ();
901 clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
902 insns = get_insns ();
903 end_sequence ();
904 for (insn = insns; insn; insn = NEXT_INSN (insn))
905 if (CALL_P (insn))
906 break;
907 if (insn == NULL_RTX)
908 {
909 emit_insn (insns);
910 return;
911 }
912
913 gcc_assert ((len & 3) == 0);
914 top_label = gen_label_rtx ();
915 addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
916 shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
917 end = force_reg (Pmode, plus_constant (Pmode, addr, len));
918 emit_label (top_label);
919
920 emit_move_insn (shadow_mem, const0_rtx);
921 tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
922 true, OPTAB_LIB_WIDEN);
923 if (tmp != addr)
924 emit_move_insn (addr, tmp);
925 emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
926 jump = get_last_insn ();
927 gcc_assert (JUMP_P (jump));
928 add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
929 }
930
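/* Emit the internal label LASANPC at the start of the current function;
   asan_emit_stack_protection stores this label's address in the stack
   frame description (point 3/ of the LEFT red zone layout above) so the
   run-time library can report the PC of the function.  */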
931 void
932 asan_function_start (void)
933 {
934 section *fnsec = function_section (current_function_decl);
935 switch_to_section (fnsec);
936 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
937 current_function_funcdef_no);
938 }
939
940 /* Insert code to protect stack vars. The prologue sequence should be emitted
941 directly, epilogue sequence returned. BASE is the register holding the
942 stack base, relative to which the OFFSETS array offsets are given.
943 OFFSETS contains pairs of offsets in reverse order, always the end offset
944 of some gap that needs protection followed by starting offset,
945 and DECLS is an array of representative decls for each var partition.
946 LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
947 elements long (OFFSETS include gap before the first variable as well
948 as gaps after each stack variable). */
949
950 rtx
951 asan_emit_stack_protection (rtx base, HOST_WIDE_INT *offsets, tree *decls,
952 int length)
953 {
954 rtx shadow_base, shadow_mem, ret, mem;
955 char buf[30];
956 unsigned char shadow_bytes[4];
957 HOST_WIDE_INT base_offset = offsets[length - 1], offset, prev_offset;
958 HOST_WIDE_INT last_offset, last_size;
959 int l;
960 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
961 tree str_cst, decl, id;
962
963 if (shadow_ptr_types[0] == NULL_TREE)
964 asan_init_shadow_ptr_types ();
965
966 /* First of all, prepare the description string. */
967 pretty_printer asan_pp;
968
969 pp_decimal_int (&asan_pp, length / 2 - 1);
970 pp_space (&asan_pp);
971 for (l = length - 2; l; l -= 2)
972 {
973 tree decl = decls[l / 2 - 1];
974 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
975 pp_space (&asan_pp);
976 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
977 pp_space (&asan_pp);
978 if (DECL_P (decl) && DECL_NAME (decl))
979 {
980 pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
981 pp_space (&asan_pp);
982 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
983 }
984 else
985 pp_string (&asan_pp, "9 <unknown>");
986 pp_space (&asan_pp);
987 }
988 str_cst = asan_pp_string (&asan_pp);
989
990 /* Emit the prologue sequence. */
991 base = expand_binop (Pmode, add_optab, base,
992 gen_int_mode (base_offset, Pmode),
993 NULL_RTX, 1, OPTAB_DIRECT);
994 mem = gen_rtx_MEM (ptr_mode, base);
995 emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
996 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
997 emit_move_insn (mem, expand_normal (str_cst));
998 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
999 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1000 id = get_identifier (buf);
1001 decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1002 VAR_DECL, id, char_type_node);
1003 SET_DECL_ASSEMBLER_NAME (decl, id);
1004 TREE_ADDRESSABLE (decl) = 1;
1005 TREE_READONLY (decl) = 1;
1006 DECL_ARTIFICIAL (decl) = 1;
1007 DECL_IGNORED_P (decl) = 1;
1008 TREE_STATIC (decl) = 1;
1009 TREE_PUBLIC (decl) = 0;
1010 TREE_USED (decl) = 1;
1011 DECL_INITIAL (decl) = decl;
1012 TREE_ASM_WRITTEN (decl) = 1;
1013 TREE_ASM_WRITTEN (id) = 1;
1014 emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1015 shadow_base = expand_binop (Pmode, lshr_optab, base,
1016 GEN_INT (ASAN_SHADOW_SHIFT),
1017 NULL_RTX, 1, OPTAB_DIRECT);
1018 shadow_base = expand_binop (Pmode, add_optab, shadow_base,
1019 gen_int_mode (targetm.asan_shadow_offset (),
1020 Pmode),
1021 NULL_RTX, 1, OPTAB_DIRECT);
1022 gcc_assert (asan_shadow_set != -1
1023 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1024 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1025 set_mem_alias_set (shadow_mem, asan_shadow_set);
1026 prev_offset = base_offset;
1027 for (l = length; l; l -= 2)
1028 {
1029 if (l == 2)
1030 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1031 offset = offsets[l - 1];
1032 if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
1033 {
1034 int i;
1035 HOST_WIDE_INT aoff
1036 = base_offset + ((offset - base_offset)
1037 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1038 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1039 (aoff - prev_offset)
1040 >> ASAN_SHADOW_SHIFT);
1041 prev_offset = aoff;
1042 for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
1043 if (aoff < offset)
1044 {
1045 if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
1046 shadow_bytes[i] = 0;
1047 else
1048 shadow_bytes[i] = offset - aoff;
1049 }
1050 else
1051 shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
1052 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1053 offset = aoff;
1054 }
1055 while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
1056 {
1057 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1058 (offset - prev_offset)
1059 >> ASAN_SHADOW_SHIFT);
1060 prev_offset = offset;
1061 memset (shadow_bytes, cur_shadow_byte, 4);
1062 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1063 offset += ASAN_RED_ZONE_SIZE;
1064 }
1065 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1066 }
1067 do_pending_stack_adjust ();
1068
1069 /* Construct epilogue sequence. */
1070 start_sequence ();
1071
1072 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1073 set_mem_alias_set (shadow_mem, asan_shadow_set);
1074 prev_offset = base_offset;
1075 last_offset = base_offset;
1076 last_size = 0;
1077 for (l = length; l; l -= 2)
1078 {
1079 offset = base_offset + ((offsets[l - 1] - base_offset)
1080 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1081 if (last_offset + last_size != offset)
1082 {
1083 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1084 (last_offset - prev_offset)
1085 >> ASAN_SHADOW_SHIFT);
1086 prev_offset = last_offset;
1087 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1088 last_offset = offset;
1089 last_size = 0;
1090 }
1091 last_size += base_offset + ((offsets[l - 2] - base_offset)
1092 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1093 - offset;
1094 }
1095 if (last_size)
1096 {
1097 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1098 (last_offset - prev_offset)
1099 >> ASAN_SHADOW_SHIFT);
1100 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1101 }
1102
1103 do_pending_stack_adjust ();
1104
1105 ret = get_insns ();
1106 end_sequence ();
1107 return ret;
1108 }
1109
1110 /* Return true if DECL, a global var, might be overridden and
1111 therefore needs a local alias. */
1112
1113 static bool
1114 asan_needs_local_alias (tree decl)
1115 {
1116 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1117 }
1118
1119 /* Return true if DECL is a VAR_DECL that should be protected
1120 by Address Sanitizer, by appending a red zone with protected
1121 shadow memory after it and aligning it to at least
1122 ASAN_RED_ZONE_SIZE bytes. */
1123
1124 bool
1125 asan_protect_global (tree decl)
1126 {
1127 rtx rtl, symbol;
1128
1129 if (TREE_CODE (decl) == STRING_CST)
1130 {
1131 /* Instrument all STRING_CSTs except those created
1132 by asan_pp_string here. */
1133 if (shadow_ptr_types[0] != NULL_TREE
1134 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1135 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1136 return false;
1137 return true;
1138 }
1139 if (TREE_CODE (decl) != VAR_DECL
1140 /* TLS vars aren't statically protectable. */
1141 || DECL_THREAD_LOCAL_P (decl)
1142 /* Externs will be protected elsewhere. */
1143 || DECL_EXTERNAL (decl)
1144 || !DECL_RTL_SET_P (decl)
1145 /* Comdat vars pose an ABI problem, we can't know if
1146 the var that is selected by the linker will have
1147 padding or not. */
1148 || DECL_ONE_ONLY (decl)
1149 /* Similarly for common vars. People can use -fno-common. */
1150 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
1151 /* Don't protect if using user section, often vars placed
1152 into user section from multiple TUs are then assumed
1153 to be an array of such vars, putting padding in there
1154 breaks this assumption. */
1155 || (DECL_SECTION_NAME (decl) != NULL_TREE
1156 && !DECL_HAS_IMPLICIT_SECTION_NAME_P (decl))
1157 || DECL_SIZE (decl) == 0
1158 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1159 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1160 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE)
1161 return false;
1162
1163 rtl = DECL_RTL (decl);
1164 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1165 return false;
1166 symbol = XEXP (rtl, 0);
1167
1168 if (CONSTANT_POOL_ADDRESS_P (symbol)
1169 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1170 return false;
1171
1172 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1173 return false;
1174
1175 #ifndef ASM_OUTPUT_DEF
1176 if (asan_needs_local_alias (decl))
1177 return false;
1178 #endif
1179
1180 return true;
1181 }
1182
1183 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16}.
1184 IS_STORE is either 1 (for a store) or 0 (for a load).
1185 SIZE_IN_BYTES is one of 1, 2, 4, 8, 16. */
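/* For example, report_error_func (true, 8) -- an 8-byte store -- returns
   the decl of __asan_report_store8.  */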
1186
1187 static tree
1188 report_error_func (bool is_store, int size_in_bytes)
1189 {
1190 static enum built_in_function report[2][5]
1191 = { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1192 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1193 BUILT_IN_ASAN_REPORT_LOAD16 },
1194 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1195 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1196 BUILT_IN_ASAN_REPORT_STORE16 } };
1197 return builtin_decl_implicit (report[is_store][exact_log2 (size_in_bytes)]);
1198 }
1199
1200 #define PROB_VERY_UNLIKELY (REG_BR_PROB_BASE / 2000 - 1)
1201 #define PROB_ALWAYS (REG_BR_PROB_BASE)
1202
1203 /* Split the current basic block and create a condition statement
1204 insertion point right before or after the statement pointed to by
1205 ITER. Return an iterator to the point at which the caller might
1206 safely insert the condition statement.
1207
1208 THEN_BLOCK must be set to the address of an uninitialized instance
1209 of basic_block. The function will then set *THEN_BLOCK to the
1210 'then block' of the condition statement to be inserted by the
1211 caller.
1212
1213 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1214 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1215
1216 Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1217 block' of the condition statement to be inserted by the caller.
1218
1219 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1220 statements starting from *ITER, and *THEN_BLOCK is a new empty
1221 block.
1222
1223 *ITER is adjusted to always point to the first statement
1224 of the basic block *FALLTHROUGH_BLOCK. That statement is the
1225 same as what ITER was pointing to prior to calling this function,
1226 if BEFORE_P is true; otherwise, it is its following statement. */
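/* Schematically, the CFG after the split looks like this (edge labels
   are the edge flags):

        [ cond_bb ] --TRUE--> [ *THEN_BLOCK ]
             |                      |
           FALSE      FALLTHRU (only if CREATE_THEN_FALLTHRU_EDGE)
             v                      v
        [ *FALLTHROUGH_BLOCK, starting at *ITER ]
*/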
1227
1228 static gimple_stmt_iterator
1229 create_cond_insert_point (gimple_stmt_iterator *iter,
1230 bool before_p,
1231 bool then_more_likely_p,
1232 bool create_then_fallthru_edge,
1233 basic_block *then_block,
1234 basic_block *fallthrough_block)
1235 {
1236 gimple_stmt_iterator gsi = *iter;
1237
1238 if (!gsi_end_p (gsi) && before_p)
1239 gsi_prev (&gsi);
1240
1241 basic_block cur_bb = gsi_bb (*iter);
1242
1243 edge e = split_block (cur_bb, gsi_stmt (gsi));
1244
1245 /* Get a hold on the 'condition block', the 'then block' and the
1246 'else block'. */
1247 basic_block cond_bb = e->src;
1248 basic_block fallthru_bb = e->dest;
1249 basic_block then_bb = create_empty_bb (cond_bb);
1250 if (current_loops)
1251 {
1252 add_bb_to_loop (then_bb, cond_bb->loop_father);
1253 loops_state_set (LOOPS_NEED_FIXUP);
1254 }
1255
1256 /* Set up the newly created 'then block'. */
1257 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1258 int fallthrough_probability
1259 = then_more_likely_p
1260 ? PROB_VERY_UNLIKELY
1261 : PROB_ALWAYS - PROB_VERY_UNLIKELY;
1262 e->probability = PROB_ALWAYS - fallthrough_probability;
1263 if (create_then_fallthru_edge)
1264 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1265
1266 /* Set up the fallthrough basic block. */
1267 e = find_edge (cond_bb, fallthru_bb);
1268 e->flags = EDGE_FALSE_VALUE;
1269 e->count = cond_bb->count;
1270 e->probability = fallthrough_probability;
1271
1272 /* Update dominance info for the newly created then_bb; note that
1273 fallthru_bb's dominance info has already been updated by
1274 split_block. */
1275 if (dom_info_available_p (CDI_DOMINATORS))
1276 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1277
1278 *then_block = then_bb;
1279 *fallthrough_block = fallthru_bb;
1280 *iter = gsi_start_bb (fallthru_bb);
1281
1282 return gsi_last_bb (cond_bb);
1283 }
1284
1285 /* Insert an if condition followed by a 'then block' right before the
1286 statement pointed to by ITER. The fallthrough block -- which is the
1287 else block of the condition as well as the destination of the
1288 outgoing edge of the 'then block' -- starts with the statement
1289 pointed to by ITER.
1290
1291 COND is the condition of the if.
1292
1293 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1294 'then block' is higher than the probability of the edge to the
1295 fallthrough block.
1296
1297 Upon completion of the function, *THEN_BB is set to the newly
1298 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1299 fallthrough block.
1300
1301 *ITER is adjusted to still point to the same statement it was
1302 pointing to initially. */
1303
1304 static void
1305 insert_if_then_before_iter (gimple cond,
1306 gimple_stmt_iterator *iter,
1307 bool then_more_likely_p,
1308 basic_block *then_bb,
1309 basic_block *fallthrough_bb)
1310 {
1311 gimple_stmt_iterator cond_insert_point =
1312 create_cond_insert_point (iter,
1313 /*before_p=*/true,
1314 then_more_likely_p,
1315 /*create_then_fallthru_edge=*/true,
1316 then_bb,
1317 fallthrough_bb);
1318 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1319 }
1320
1321 /* Instrument the memory access instruction BASE. Insert new
1322 statements before or after ITER.
1323
1324 Note that the memory access represented by BASE can be either an
1325 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1326 location. IS_STORE is TRUE for a store, FALSE for a load.
1327 BEFORE_P is TRUE for inserting the instrumentation code before
1328 ITER, FALSE for inserting it after ITER. SIZE_IN_BYTES is one of
1329 1, 2, 4, 8, 16.
1330
1331 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1332 statement it was pointing to prior to calling this function,
1333 otherwise, it points to the statement logically following it. */
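/* As a sketch, for a 4-byte load the emitted gimple looks roughly like
   this (the shadow offset value is illustrative):

     _1 = (unsigned long) base;
     _2 = _1 >> 3;
     _3 = _2 + 0x7fff8000;
     _4 = (signed char *) _3;
     shadow_5 = *_4;
     if (<slow path check on shadow_5> != 0)
       __asan_report_load4 (_1);
*/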
1334
1335 static void
1336 build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
1337 bool before_p, bool is_store, int size_in_bytes)
1338 {
1339 gimple_stmt_iterator gsi;
1340 basic_block then_bb, else_bb;
1341 tree t, base_addr, shadow;
1342 gimple g;
1343 tree shadow_ptr_type = shadow_ptr_types[size_in_bytes == 16 ? 1 : 0];
1344 tree shadow_type = TREE_TYPE (shadow_ptr_type);
1345 tree uintptr_type
1346 = build_nonstandard_integer_type (TYPE_PRECISION (TREE_TYPE (base)), 1);
1347 tree base_ssa = base;
1348
1349 /* Get an iterator on the point where we can add the condition
1350 statement for the instrumentation. */
1351 gsi = create_cond_insert_point (iter, before_p,
1352 /*then_more_likely_p=*/false,
1353 /*create_then_fallthru_edge=*/false,
1354 &then_bb,
1355 &else_bb);
1356
1357 base = unshare_expr (base);
1358
1359 /* BASE can already be an SSA_NAME; in that case, do not create a
1360 new SSA_NAME for it. */
1361 if (TREE_CODE (base) != SSA_NAME)
1362 {
1363 g = gimple_build_assign_with_ops (TREE_CODE (base),
1364 make_ssa_name (TREE_TYPE (base), NULL),
1365 base, NULL_TREE);
1366 gimple_set_location (g, location);
1367 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1368 base_ssa = gimple_assign_lhs (g);
1369 }
1370
1371 g = gimple_build_assign_with_ops (NOP_EXPR,
1372 make_ssa_name (uintptr_type, NULL),
1373 base_ssa, NULL_TREE);
1374 gimple_set_location (g, location);
1375 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1376 base_addr = gimple_assign_lhs (g);
1377
1378 /* Build
1379 (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset (). */
1380
1381 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
1382 g = gimple_build_assign_with_ops (RSHIFT_EXPR,
1383 make_ssa_name (uintptr_type, NULL),
1384 base_addr, t);
1385 gimple_set_location (g, location);
1386 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1387
1388 t = build_int_cst (uintptr_type, targetm.asan_shadow_offset ());
1389 g = gimple_build_assign_with_ops (PLUS_EXPR,
1390 make_ssa_name (uintptr_type, NULL),
1391 gimple_assign_lhs (g), t);
1392 gimple_set_location (g, location);
1393 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1394
1395 g = gimple_build_assign_with_ops (NOP_EXPR,
1396 make_ssa_name (shadow_ptr_type, NULL),
1397 gimple_assign_lhs (g), NULL_TREE);
1398 gimple_set_location (g, location);
1399 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1400
1401 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1402 build_int_cst (shadow_ptr_type, 0));
1403 g = gimple_build_assign_with_ops (MEM_REF,
1404 make_ssa_name (shadow_type, NULL),
1405 t, NULL_TREE);
1406 gimple_set_location (g, location);
1407 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1408 shadow = gimple_assign_lhs (g);
1409
1410 if (size_in_bytes < 8)
1411 {
1412 /* Slow path for 1, 2 and 4 byte accesses.
1413 Test (shadow != 0)
1414 & ((base_addr & 7) + (size_in_bytes - 1) >= shadow). */
1415 gimple_seq seq = NULL;
1416 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
1417 gimple_seq_add_stmt (&seq, shadow_test);
1418 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, base_addr, 7));
1419 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
1420 gimple_seq_last (seq)));
1421 if (size_in_bytes > 1)
1422 gimple_seq_add_stmt (&seq,
1423 build_assign (PLUS_EXPR, gimple_seq_last (seq),
1424 size_in_bytes - 1));
1425 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, gimple_seq_last (seq),
1426 shadow));
1427 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
1428 gimple_seq_last (seq)));
1429 t = gimple_assign_lhs (gimple_seq_last (seq));
1430 gimple_seq_set_location (seq, location);
1431 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
1432 }
1433 else
1434 t = shadow;
1435
1436 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
1437 NULL_TREE, NULL_TREE);
1438 gimple_set_location (g, location);
1439 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1440
1441 /* Generate call to the run-time library (e.g. __asan_report_load8). */
1442 gsi = gsi_start_bb (then_bb);
1443 g = gimple_build_call (report_error_func (is_store, size_in_bytes),
1444 1, base_addr);
1445 gimple_set_location (g, location);
1446 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1447
1448 *iter = gsi_start_bb (else_bb);
1449 }
1450
1451 /* If T represents a memory access, add instrumentation code before ITER.
1452 LOCATION is source code location.
1453 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
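/* For instance, for the statement 'a[5] = 1;' from the example at the
   top of this file, T is the ARRAY_REF a[5], SIZE_IN_BYTES is 1, and
   the address checked is &a[5].  */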
1454
1455 static void
1456 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1457 location_t location, bool is_store)
1458 {
1459 tree type, base;
1460 HOST_WIDE_INT size_in_bytes;
1461
1462 type = TREE_TYPE (t);
1463 switch (TREE_CODE (t))
1464 {
1465 case ARRAY_REF:
1466 case COMPONENT_REF:
1467 case INDIRECT_REF:
1468 case MEM_REF:
1469 break;
1470 default:
1471 return;
1472 }
1473
1474 size_in_bytes = int_size_in_bytes (type);
1475 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1476 || (unsigned HOST_WIDE_INT) size_in_bytes - 1 >= 16)
1477 return;
1478
1479 HOST_WIDE_INT bitsize, bitpos;
1480 tree offset;
1481 enum machine_mode mode;
1482 int volatilep = 0, unsignedp = 0;
1483 get_inner_reference (t, &bitsize, &bitpos, &offset,
1484 &mode, &unsignedp, &volatilep, false);
1485 if (bitpos % (size_in_bytes * BITS_PER_UNIT)
1486 || bitsize != size_in_bytes * BITS_PER_UNIT)
1487 {
1488 if (TREE_CODE (t) == COMPONENT_REF
1489 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1490 {
1491 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1492 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1493 TREE_OPERAND (t, 0), repr,
1494 NULL_TREE), location, is_store);
1495 }
1496 return;
1497 }
1498
1499 base = build_fold_addr_expr (t);
1500 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1501 {
1502 build_check_stmt (location, base, iter, /*before_p=*/true,
1503 is_store, size_in_bytes);
1504 update_mem_ref_hash_table (base, size_in_bytes);
1505 update_mem_ref_hash_table (t, size_in_bytes);
1506 }
1507
1508 }
1509
1510 /* Instrument an access to a contiguous memory region that starts at
1511 the address pointed to by BASE, over a length of LEN (expressed in
1512 units of sizeof (*BASE) bytes). ITER points to the instruction before
1513 which the instrumentation instructions must be inserted. LOCATION
1514 is the source location that the instrumentation instructions must
1515 have. If IS_STORE is true, then the memory access is a store;
1516 otherwise, it's a load. */
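/* Conceptually, for 'memset (p, 0, n)' with a non-constant N this emits
   (sketch):

     if (n != 0)
       {
         check 1-byte store at p[0];
         _1 = p + (n - 1);
         check 1-byte store at *_1;
       }
*/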
1517
1518 static void
1519 instrument_mem_region_access (tree base, tree len,
1520 gimple_stmt_iterator *iter,
1521 location_t location, bool is_store)
1522 {
1523 if (!POINTER_TYPE_P (TREE_TYPE (base))
1524 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1525 || integer_zerop (len))
1526 return;
1527
1528 gimple_stmt_iterator gsi = *iter;
1529
1530 basic_block fallthrough_bb = NULL, then_bb = NULL;
1531
1532 /* If the beginning of the memory region has already been
1533 instrumented, do not instrument it. */
1534 bool start_instrumented = has_mem_ref_been_instrumented (base, 1);
1535
1536 /* If the end of the memory region has already been instrumented, do
1537 not instrument it. */
1538 tree end = asan_mem_ref_get_end (base, len);
1539 bool end_instrumented = has_mem_ref_been_instrumented (end, 1);
1540
1541 if (start_instrumented && end_instrumented)
1542 return;
1543
1544 if (!is_gimple_constant (len))
1545 {
1546 /* So, the length of the memory area to asan-protect is
1547 non-constant. Let's guard the generated instrumentation code
1548 like:
1549
1550 if (len != 0)
1551 {
1552 //asan instrumentation code goes here.
1553 }
1554 // fallthrough instructions, starting with *ITER. */
1555
1556 gimple g = gimple_build_cond (NE_EXPR,
1557 len,
1558 build_int_cst (TREE_TYPE (len), 0),
1559 NULL_TREE, NULL_TREE);
1560 gimple_set_location (g, location);
1561 insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
1562 &then_bb, &fallthrough_bb);
1563 /* Note that fallthrough_bb starts with the statement that was
1564 pointed to by ITER. */
1565
1566 /* The 'then block' of the 'if (len != 0)' condition is where
1567 we'll generate the asan instrumentation code now. */
1568 gsi = gsi_last_bb (then_bb);
1569 }
1570
1571 if (!start_instrumented)
1572 {
1573 /* Instrument the beginning of the memory region to be accessed,
1574 and arrange for the rest of the instrumentation code to be
1575 inserted in the then block *after* the current gsi. */
1576 build_check_stmt (location, base, &gsi, /*before_p=*/true, is_store, 1);
1577
1578 if (then_bb)
1579 /* We are in the case where the length of the region is not
1580 constant; so instrumentation code is being generated in the
1581 'then block' of the 'if (len != 0)' condition. Let's arrange
1582 for the subsequent instrumentation statements to go in the
1583 'then block'. */
1584 gsi = gsi_last_bb (then_bb);
1585 else
1586 {
1587 *iter = gsi;
1588 /* Don't remember this access as instrumented, if length
1589 is unknown. It might be zero, in which case the access is not
1590 actually instrumented, so we can't rely on it being instrumented. */
1591 update_mem_ref_hash_table (base, 1);
1592 }
1593 }
1594
1595 if (end_instrumented)
1596 return;
1597
1598 /* We want to instrument the access at the end of the memory region,
1599 which is at (base + len - 1). */
1600
1601 /* offset = len - 1; */
1602 len = unshare_expr (len);
1603 tree offset;
1604 gimple_seq seq = NULL;
1605 if (TREE_CODE (len) == INTEGER_CST)
1606 offset = fold_build2 (MINUS_EXPR, size_type_node,
1607 fold_convert (size_type_node, len),
1608 build_int_cst (size_type_node, 1));
1609 else
1610 {
1611 gimple g;
1612 tree t;
1613
1614 if (TREE_CODE (len) != SSA_NAME)
1615 {
1616 t = make_ssa_name (TREE_TYPE (len), NULL);
1617 g = gimple_build_assign_with_ops (TREE_CODE (len), t, len, NULL);
1618 gimple_set_location (g, location);
1619 gimple_seq_add_stmt_without_update (&seq, g);
1620 len = t;
1621 }
1622 if (!useless_type_conversion_p (size_type_node, TREE_TYPE (len)))
1623 {
1624 t = make_ssa_name (size_type_node, NULL);
1625 g = gimple_build_assign_with_ops (NOP_EXPR, t, len, NULL);
1626 gimple_set_location (g, location);
1627 gimple_seq_add_stmt_without_update (&seq, g);
1628 len = t;
1629 }
1630
1631 t = make_ssa_name (size_type_node, NULL);
1632 g = gimple_build_assign_with_ops (MINUS_EXPR, t, len,
1633 build_int_cst (size_type_node, 1));
1634 gimple_set_location (g, location);
1635 gimple_seq_add_stmt_without_update (&seq, g);
1636 offset = gimple_assign_lhs (g);
1637 }
1638
1639 /* _1 = base; */
1640 base = unshare_expr (base);
1641 gimple region_end =
1642 gimple_build_assign_with_ops (TREE_CODE (base),
1643 make_ssa_name (TREE_TYPE (base), NULL),
1644 base, NULL);
1645 gimple_set_location (region_end, location);
1646 gimple_seq_add_stmt_without_update (&seq, region_end);
1647
1648 /* _2 = _1 + offset; */
1649 region_end =
1650 gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
1651 make_ssa_name (TREE_TYPE (base), NULL),
1652 gimple_assign_lhs (region_end),
1653 offset);
1654 gimple_set_location (region_end, location);
1655 gimple_seq_add_stmt_without_update (&seq, region_end);
1656 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
1657
1658 /* instrument access at _2; */
1659 gsi = gsi_for_stmt (region_end);
1660 build_check_stmt (location, gimple_assign_lhs (region_end),
1661 &gsi, /*before_p=*/false, is_store, 1);
1662
1663 if (then_bb == NULL)
1664 update_mem_ref_hash_table (end, 1);
1665
1666 *iter = gsi_for_stmt (gsi_stmt (*iter));
1667 }
1668
1669 /* Instrument the call (to the builtin strlen function) pointed to by
1670 ITER.
1671
1672 This function instruments the access to the first byte of the
1673 argument, right before the call. After the call it instruments the
1674 access to the last byte of the argument; it uses the result of the
1675 call to deduce the offset of that last byte.
1676
1677 Upon completion, iff the call has actually been instrumented, this
1678 function returns TRUE and *ITER points to the statement logically
1679 following the built-in strlen function call *ITER was initially
1680 pointing to. Otherwise, the function returns FALSE and *ITER
1681 remains unchanged. */
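/* Sketch of the transformation, for 'n = strlen (str)':

     _1 = (char *) str;
     <check 1-byte load at _1>      // first byte of the string
     n = strlen (str);
     _2 = _1 + n;
     <check 1-byte load at _2>      // the terminating NUL, str[n]
*/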
1682
1683 static bool
1684 instrument_strlen_call (gimple_stmt_iterator *iter)
1685 {
1686 gimple call = gsi_stmt (*iter);
1687 gcc_assert (is_gimple_call (call));
1688
1689 tree callee = gimple_call_fndecl (call);
1690 gcc_assert (is_builtin_fn (callee)
1691 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
1692 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN);
1693
1694 tree len = gimple_call_lhs (call);
1695 if (len == NULL)
1696 /* Some passes might clear the return value of the strlen call;
1697 bail out in that case. Return FALSE as we are not advancing
1698 *ITER. */
1699 return false;
1700 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (len)));
1701
1702 location_t loc = gimple_location (call);
1703 tree str_arg = gimple_call_arg (call, 0);
1704
  /* Instrument the access to the first byte of str_arg, i.e.:

       _1 = str_arg; instrument (_1);  */
  tree cptr_type = build_pointer_type (char_type_node);
  gimple str_arg_ssa =
    gimple_build_assign_with_ops (NOP_EXPR,
				  make_ssa_name (cptr_type, NULL),
				  str_arg, NULL);
  gimple_set_location (str_arg_ssa, loc);
  gimple_stmt_iterator gsi = *iter;
  gsi_insert_before (&gsi, str_arg_ssa, GSI_NEW_STMT);
  build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa), &gsi,
		    /*before_p=*/false, /*is_store=*/false, 1);

  /* If we initially had an instruction like:

       int n = strlen (str)

     we now want to instrument the access to str[n], after the
     instruction above.  */

  /* So let's build the access to str[n], that is, access through the
     pointer_plus expr: (_1 + len).  */
  gimple stmt =
    gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
				  make_ssa_name (cptr_type, NULL),
				  gimple_assign_lhs (str_arg_ssa),
				  len);
  gimple_set_location (stmt, loc);
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);

  build_check_stmt (loc, gimple_assign_lhs (stmt), &gsi,
		    /*before_p=*/false, /*is_store=*/false, 1);

  /* Ensure that iter points to the statement logically following the
     one it was initially pointing to.  */
  *iter = gsi;
  /* As *ITER has been advanced to point to the next statement, let's
     return true to inform transform_statements that it shouldn't
     advance *ITER anymore; otherwise it will skip that next
     statement, which wouldn't be instrumented.  */
  return true;
}
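
/* For illustration (a sketch with made-up SSA names): given

     n_3 = strlen (str_1);

   the instrumentation above produces, roughly:

     _2 = (char *) str_1;
     // check access at _2 (the first byte), before the call
     n_3 = strlen (str_1);
     _4 = _2 + n_3;
     // check access at _4 (the terminating NUL at str[n]), after
     // the call  */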

/* Instrument the call to a built-in memory access function that is
   pointed to by the iterator ITER.

   Upon completion, return TRUE iff *ITER has been advanced to the
   statement following the one it was originally pointing to.  */

static bool
instrument_builtin_call (gimple_stmt_iterator *iter)
{
  bool iter_advanced_p = false;
  gimple call = gsi_stmt (*iter);

  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  location_t loc = gimple_location (call);

  if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN)
    iter_advanced_p = instrument_strlen_call (iter);
  else
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false;

      if (get_mem_refs_of_builtin_call (call,
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref))
	{
	  if (dest_is_deref)
	    {
	      instrument_derefs (iter, dest.start, loc, dest_is_store);
	      gsi_next (iter);
	      iter_advanced_p = true;
	    }
	  else if (src0_len || src1_len || dest_len)
	    {
	      if (src0.start != NULL_TREE)
		instrument_mem_region_access (src0.start, src0_len,
					      iter, loc, /*is_store=*/false);
	      if (src1.start != NULL_TREE)
		instrument_mem_region_access (src1.start, src1_len,
					      iter, loc, /*is_store=*/false);
	      if (dest.start != NULL_TREE)
		instrument_mem_region_access (dest.start, dest_len,
					      iter, loc, /*is_store=*/true);
	      *iter = gsi_for_stmt (call);
	      gsi_next (iter);
	      iter_advanced_p = true;
	    }
	}
    }
  return iter_advanced_p;
}
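
/* For illustration (a sketch): for a call like

     memcpy (d, s, n);

   get_mem_refs_of_builtin_call fills in SRC0 = s and DEST = d, each
   with length n, so both regions are handled by
   instrument_mem_region_access; whereas an atomic builtin such as

     v = __atomic_load_8 (p, 0);

   is reported as a plain dereference of its pointer argument and
   handled by instrument_derefs.  */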

/* Instrument the assignment statement ITER if it is subject to
   instrumentation.  Return TRUE iff instrumentation actually
   happened.  In that case, the iterator ITER is advanced to the next
   logical expression following the one initially pointed to by ITER,
   and the memory reference whose access has been instrumented is
   added to the memory references hash table.  */

static bool
maybe_instrument_assignment (gimple_stmt_iterator *iter)
{
  gimple s = gsi_stmt (*iter);

  gcc_assert (gimple_assign_single_p (s));

  tree ref_expr = NULL_TREE;
  bool is_store, is_instrumented = false;

  if (gimple_store_p (s))
    {
      ref_expr = gimple_assign_lhs (s);
      is_store = true;
      instrument_derefs (iter, ref_expr,
			 gimple_location (s),
			 is_store);
      is_instrumented = true;
    }

  if (gimple_assign_load_p (s))
    {
      ref_expr = gimple_assign_rhs1 (s);
      is_store = false;
      instrument_derefs (iter, ref_expr,
			 gimple_location (s),
			 is_store);
      is_instrumented = true;
    }

  if (is_instrumented)
    gsi_next (iter);

  return is_instrumented;
}
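
/* For illustration: in

     *p_1 = x_2;    // gimple_store_p: the LHS reference is checked
     y_3 = *q_4;    // gimple_assign_load_p: the RHS reference is checked

   instrument_derefs emits the shadow-memory check and records the
   reference in the hash table, so a second access to the same
   location within the same extended basic block is not
   re-instrumented.  */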

/* Instrument the function call pointed to by the iterator ITER, if it
   is subject to instrumentation.  At the moment, the only function
   calls that are instrumented are some built-in functions that access
   memory.  Look at instrument_builtin_call to learn more.

   Upon completion return TRUE iff *ITER was advanced to the statement
   following the one it was originally pointing to.  */

static bool
maybe_instrument_call (gimple_stmt_iterator *iter)
{
  gimple stmt = gsi_stmt (*iter);
  bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);

  if (is_builtin && instrument_builtin_call (iter))
    return true;

  if (gimple_call_noreturn_p (stmt))
    {
      if (is_builtin)
	{
	  tree callee = gimple_call_fndecl (stmt);
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_UNREACHABLE:
	    case BUILT_IN_TRAP:
	      /* Don't instrument these.  */
	      return false;
	    default:
	      break;
	    }
	}
      tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
      gimple g = gimple_build_call (decl, 0);
      gimple_set_location (g, gimple_location (stmt));
      gsi_insert_before (iter, g, GSI_SAME_STMT);
    }
  return false;
}
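
/* For illustration: a noreturn call such as

     abort ();

   becomes, roughly:

     __asan_handle_no_return ();
     abort ();

   giving the run-time a chance to unpoison the current stack, since
   control leaving through a noreturn call would otherwise leave stale
   redzone markings behind.  */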

/* Walk each instruction in all basic blocks and instrument those that
   represent memory references: loads, stores, or function calls.
   In a given basic block, this function avoids instrumenting memory
   references that have already been instrumented.  */

static void
transform_statements (void)
{
  basic_block bb, last_bb = NULL;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block;

  FOR_EACH_BB (bb)
    {
      basic_block prev_bb = bb;

      if (bb->index >= saved_last_basic_block) continue;

      /* Flush the mem ref hash table, if current bb doesn't have
	 exactly one predecessor, or if that predecessor (skipping
	 over asan created basic blocks) isn't the last processed
	 basic block.  Thus we effectively flush on extended basic
	 block boundaries.  */
      while (single_pred_p (prev_bb))
	{
	  prev_bb = single_pred (prev_bb);
	  if (prev_bb->index < saved_last_basic_block)
	    break;
	}
      if (prev_bb != last_bb)
	empty_mem_ref_hash_table ();
      last_bb = bb;

      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
	{
	  gimple s = gsi_stmt (i);

	  if (has_stmt_been_instrumented_p (s))
	    gsi_next (&i);
	  else if (gimple_assign_single_p (s)
		   && maybe_instrument_assignment (&i))
	    /* Nothing to do as maybe_instrument_assignment advanced
	       the iterator I.  */;
	  else if (is_gimple_call (s) && maybe_instrument_call (&i))
	    /* Nothing to do as maybe_instrument_call
	       advanced the iterator I.  */;
	  else
	    {
	      /* No instrumentation happened.

		 If the current instruction is a function call that
		 might free something, let's forget about the memory
		 references that got instrumented.  Otherwise we might
		 miss some instrumentation opportunities.  */
	      if (is_gimple_call (s) && !nonfreeing_call_p (s))
		empty_mem_ref_hash_table ();

	      gsi_next (&i);
	    }
	}
    }
  free_mem_ref_resources ();
}
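
/* For illustration: with basic blocks

     bb2 -> bb3          (bb3's single predecessor, just processed)
     bb4, with predecessors bb3 and bb5

   the hash table survives from bb2 into bb3, but is flushed on entry
   to bb4: a reference instrumented on only one of the incoming paths
   must not be treated as already covered on the other.  */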

/* Build
   struct __asan_global
   {
     const void *__beg;
     uptr __size;
     uptr __size_with_redzone;
     const void *__name;
     const void *__module_name;
     uptr __has_dynamic_init;
   } type.  */

static tree
asan_global_struct (void)
{
  static const char *field_names[6]
    = { "__beg", "__size", "__size_with_redzone",
	"__name", "__module_name", "__has_dynamic_init" };
  tree fields[6], ret;
  int i;

  ret = make_node (RECORD_TYPE);
  for (i = 0; i < 6; i++)
    {
      fields[i]
	= build_decl (UNKNOWN_LOCATION, FIELD_DECL,
		      get_identifier (field_names[i]),
		      (i == 0 || i == 3) ? const_ptr_type_node
		      : pointer_sized_int_node);
      DECL_CONTEXT (fields[i]) = ret;
      if (i)
	DECL_CHAIN (fields[i - 1]) = fields[i];
    }
  TYPE_FIELDS (ret) = fields[0];
  TYPE_NAME (ret) = get_identifier ("__asan_global");
  layout_type (ret);
  return ret;
}

/* Append description of a single global DECL into vector V.
   TYPE is __asan_global struct type as returned by asan_global_struct.  */

static void
asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
{
  tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  unsigned HOST_WIDE_INT size;
  tree str_cst, module_name_cst, refdecl = decl;
  vec<constructor_elt, va_gc> *vinner = NULL;

  pretty_printer asan_pp, module_name_pp;

  if (DECL_NAME (decl))
    pp_tree_identifier (&asan_pp, DECL_NAME (decl));
  else
    pp_string (&asan_pp, "<unknown>");
  str_cst = asan_pp_string (&asan_pp);

  pp_string (&module_name_pp, main_input_filename);
  module_name_cst = asan_pp_string (&module_name_pp);

  if (asan_needs_local_alias (decl))
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
      refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
			    VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
      TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (refdecl) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
      DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
      DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
      DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
      TREE_STATIC (refdecl) = 1;
      TREE_PUBLIC (refdecl) = 0;
      TREE_USED (refdecl) = 1;
      assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
    }

  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node,
					build_fold_addr_expr (refdecl)));
  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  size += asan_red_zone_size (size);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, str_cst));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, module_name_cst));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, 0));
  init = build_constructor (type, vinner);
  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
}
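
/* For illustration (a sketch; the exact padding comes from
   asan_red_zone_size): for

     int some_global;

   defined in t.c, the record appended to V describes roughly

     { &some_global, 4, 64, "some_global", "t.c", 0 }

   i.e. a 4-byte object padded with redzone up to 64 bytes, with no
   dynamic initialization.  */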

/* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */
void
initialize_sanitizer_builtins (void)
{
  tree decl;

  if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
    return;

  tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR
    = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTRMODE
    = build_function_type_list (void_type_node, ptr_type_node,
				pointer_sized_int_node, NULL_TREE);
  tree BT_FN_VOID_INT
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
  tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
  tree BT_FN_IX_CONST_VPTR_INT[5];
  tree BT_FN_IX_VPTR_IX_INT[5];
  tree BT_FN_VOID_VPTR_IX_INT[5];
  tree vptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE));
  tree cvptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE
						| TYPE_QUAL_CONST));
  tree boolt
    = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
  int i;
  for (i = 0; i < 5; i++)
    {
      tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
      BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
	= build_function_type_list (boolt, vptr, ptr_type_node, ix,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
      BT_FN_IX_CONST_VPTR_INT[i]
	= build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
      BT_FN_IX_VPTR_IX_INT[i]
	= build_function_type_list (ix, vptr, ix, integer_type_node,
				    NULL_TREE);
      BT_FN_VOID_VPTR_IX_INT[i]
	= build_function_type_list (void_type_node, vptr, ix,
				    integer_type_node, NULL_TREE);
    }
#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
#undef ATTR_NOTHROW_LEAF_LIST
#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
  ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NOTHROW_LEAF_LIST
#define ATTR_COLD_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef DEF_SANITIZER_BUILTIN
#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
  decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
			       BUILT_IN_NORMAL, NAME, NULL_TREE); \
  set_call_expr_flags (decl, ATTRS); \
  set_builtin_decl (ENUM, decl, true);

#include "sanitizer.def"

#undef DEF_SANITIZER_BUILTIN
}
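
/* For illustration: a sanitizer.def entry along the lines of

     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_INIT, "__asan_init_vN",
			    BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST)

   (the exact name and version number live in sanitizer.def) expands,
   via the macro above, to

     decl = add_builtin_function ("__builtin___asan_init_vN",
				  BT_FN_VOID, BUILT_IN_ASAN_INIT,
				  BUILT_IN_NORMAL, "__asan_init_vN",
				  NULL_TREE);
     set_call_expr_flags (decl, ECF_NOTHROW | ECF_LEAF);
     set_builtin_decl (BUILT_IN_ASAN_INIT, decl, true);  */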

/* Called via htab_traverse.  Count number of emitted
   STRING_CSTs in the constant hash table.  */

static int
count_string_csts (void **slot, void *data)
{
  struct constant_descriptor_tree *desc
    = (struct constant_descriptor_tree *) *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    ++*((unsigned HOST_WIDE_INT *) data);
  return 1;
}

/* Helper structure to pass two parameters to
   add_string_csts.  */

struct asan_add_string_csts_data
{
  tree type;
  vec<constructor_elt, va_gc> *v;
};

/* Called via htab_traverse.  Call asan_add_global
   on emitted STRING_CSTs from the constant hash table.  */

static int
add_string_csts (void **slot, void *data)
{
  struct constant_descriptor_tree *desc
    = (struct constant_descriptor_tree *) *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    {
      struct asan_add_string_csts_data *aascd
	= (struct asan_add_string_csts_data *) data;
      asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
		       aascd->type, aascd->v);
    }
  return 1;
}

/* Needs to be GTY(()), because cgraph_build_static_cdtor may
   invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;

/* Module-level instrumentation.
   - Insert __asan_init_vN() into the list of CTORs.
   - TODO: insert redzones around globals.
 */

void
asan_finish_file (void)
{
  struct varpool_node *vnode;
  unsigned HOST_WIDE_INT gcount = 0;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* Avoid instrumenting code in the asan ctors/dtors.
     We don't need to insert padding after the description strings,
     nor after .LASAN* array.  */
  flag_sanitize &= ~SANITIZE_ADDRESS;

  tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
  append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (TREE_ASM_WRITTEN (vnode->decl)
	&& asan_protect_global (vnode->decl))
      ++gcount;
  htab_t const_desc_htab = constant_pool_htab ();
  htab_traverse (const_desc_htab, count_string_csts, &gcount);
  if (gcount)
    {
      tree type = asan_global_struct (), var, ctor;
      tree dtor_statements = NULL_TREE;
      vec<constructor_elt, va_gc> *v;
      char buf[20];

      type = build_array_type_nelts (type, gcount);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
      var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			type);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      vec_alloc (v, gcount);
      FOR_EACH_DEFINED_VARIABLE (vnode)
	if (TREE_ASM_WRITTEN (vnode->decl)
	    && asan_protect_global (vnode->decl))
	  asan_add_global (vnode->decl, TREE_TYPE (type), v);
      struct asan_add_string_csts_data aascd;
      aascd.type = TREE_TYPE (type);
      aascd.v = v;
      htab_traverse (const_desc_htab, add_string_csts, &aascd);
      ctor = build_constructor (type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_assemble_decl (varpool_node_for_decl (var));

      fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
      tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&asan_ctor_statements);

      fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&dtor_statements);
      cgraph_build_static_cdtor ('D', dtor_statements,
				 MAX_RESERVED_INIT_PRIORITY - 1);
    }
  cgraph_build_static_cdtor ('I', asan_ctor_statements,
			     MAX_RESERVED_INIT_PRIORITY - 1);
  flag_sanitize |= SANITIZE_ADDRESS;
}
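
/* For illustration (a sketch in C-like pseudo-code): for a TU with
   instrumented globals this emits, roughly, the equivalent of

     static struct __asan_global .LASAN0[gcount] = { ... };

     static void ctor (void)
     {
       __asan_init_vN ();
       __asan_register_globals (&.LASAN0[0], gcount);
     }

     static void dtor (void)
     {
       __asan_unregister_globals (&.LASAN0[0], gcount);
     }

   with the constructor/destructor registered via
   cgraph_build_static_cdtor at the reserved priority
   MAX_RESERVED_INIT_PRIORITY - 1, so globals are registered before
   user code runs.  */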

/* Instrument the current function.  */

static unsigned int
asan_instrument (void)
{
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  transform_statements ();
  return 0;
}

static bool
gate_asan (void)
{
  return (flag_sanitize & SANITIZE_ADDRESS) != 0
	  && !lookup_attribute ("no_sanitize_address",
				DECL_ATTRIBUTES (current_function_decl));
}

namespace {

const pass_data pass_data_asan =
{
  GIMPLE_PASS, /* type */
  "asan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_verify_flow | TODO_verify_stmts
    | TODO_update_ssa ), /* todo_flags_finish */
};

class pass_asan : public gimple_opt_pass
{
public:
  pass_asan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_asan (m_ctxt); }
  bool gate () { return gate_asan (); }
  unsigned int execute () { return asan_instrument (); }

}; // class pass_asan

} // anon namespace

gimple_opt_pass *
make_pass_asan (gcc::context *ctxt)
{
  return new pass_asan (ctxt);
}

static bool
gate_asan_O0 (void)
{
  return !optimize && gate_asan ();
}

namespace {

const pass_data pass_data_asan_O0 =
{
  GIMPLE_PASS, /* type */
  "asan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_verify_flow | TODO_verify_stmts
    | TODO_update_ssa ), /* todo_flags_finish */
};

class pass_asan_O0 : public gimple_opt_pass
{
public:
  pass_asan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan_O0, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_asan_O0 (); }
  unsigned int execute () { return asan_instrument (); }

}; // class pass_asan_O0

} // anon namespace

gimple_opt_pass *
make_pass_asan_O0 (gcc::context *ctxt)
{
  return new pass_asan_O0 (ctxt);
}

/* Perform optimization of sanitize functions.  */

static unsigned int
execute_sanopt (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  if (!is_gimple_call (stmt))
	    continue;

	  if (gimple_call_internal_p (stmt))
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_NULL:
		ubsan_expand_null_ifn (gsi);
		break;
	      default:
		break;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Optimized\n ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	}
    }
  return 0;
}
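
/* For illustration (a sketch of what ubsan_expand_null_ifn does; see
   ubsan.c for the real expansion): an internal call

     UBSAN_NULL (ptr_1, kind);

   is replaced by, roughly:

     if (ptr_1 == 0)
       __ubsan_handle_type_mismatch (&data, ptr_1);

   so on the fast path the check costs only a compare and branch.  */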

static bool
gate_sanopt (void)
{
  return flag_sanitize;
}

namespace {

const pass_data pass_data_sanopt =
{
  GIMPLE_PASS, /* type */
  "sanopt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_verify_flow | TODO_verify_stmts
    | TODO_update_ssa ), /* todo_flags_finish */
};

class pass_sanopt : public gimple_opt_pass
{
public:
  pass_sanopt (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sanopt, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_sanopt (); }
  unsigned int execute () { return execute_sanopt (); }

}; // class pass_sanopt

} // anon namespace

gimple_opt_pass *
make_pass_sanopt (gcc::context *ctxt)
{
  return new pass_sanopt (ctxt);
}

#include "gt-asan.h"