[gcc.git] / gcc / asan.c
1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2015 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "input.h"
26 #include "alias.h"
27 #include "symtab.h"
28 #include "options.h"
29 #include "tree.h"
30 #include "fold-const.h"
31 #include "predict.h"
32 #include "tm.h"
33 #include "hard-reg-set.h"
34 #include "function.h"
35 #include "dominance.h"
36 #include "cfg.h"
37 #include "cfganal.h"
38 #include "basic-block.h"
39 #include "tree-ssa-alias.h"
40 #include "internal-fn.h"
41 #include "gimple-expr.h"
42 #include "is-a.h"
43 #include "gimple.h"
44 #include "gimplify.h"
45 #include "gimple-iterator.h"
46 #include "calls.h"
47 #include "varasm.h"
48 #include "stor-layout.h"
49 #include "tree-iterator.h"
50 #include "plugin-api.h"
51 #include "ipa-ref.h"
52 #include "cgraph.h"
53 #include "stringpool.h"
54 #include "tree-ssanames.h"
55 #include "tree-pass.h"
56 #include "asan.h"
57 #include "gimple-pretty-print.h"
58 #include "target.h"
59 #include "rtl.h"
60 #include "flags.h"
61 #include "insn-config.h"
62 #include "expmed.h"
63 #include "dojump.h"
64 #include "explow.h"
65 #include "emit-rtl.h"
66 #include "stmt.h"
67 #include "expr.h"
68 #include "insn-codes.h"
69 #include "optabs.h"
70 #include "output.h"
71 #include "tm_p.h"
72 #include "langhooks.h"
73 #include "alloc-pool.h"
74 #include "cfgloop.h"
75 #include "gimple-builder.h"
76 #include "ubsan.h"
77 #include "params.h"
78 #include "builtins.h"
79 #include "fnmatch.h"
80
81 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
82 with <2x slowdown on average.
83
84 The tool consists of two parts:
85 instrumentation module (this file) and a run-time library.
86 The instrumentation module adds a run-time check before every memory insn.
87 For an 8- or 16-byte load accessing address X:
88 ShadowAddr = (X >> 3) + Offset
89 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
90 if (ShadowValue)
91 __asan_report_load8(X);
92 For a load of N bytes (N=1, 2 or 4) from address X:
93 ShadowAddr = (X >> 3) + Offset
94 ShadowValue = *(char*)ShadowAddr;
95 if (ShadowValue)
96 if ((X & 7) + N - 1 > ShadowValue)
97 __asan_report_loadN(X);
98 Stores are instrumented similarly, but using __asan_report_storeN functions.
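
     As a worked example of the checks above, with an illustrative
     shadow offset of 0x7fff8000 (the real value comes from
     targetm.asan_shadow_offset () or -fasan-shadow-offset=), a 4-byte
     load from X = 0x602010 becomes:
       ShadowAddr = (0x602010 >> 3) + 0x7fff8000; // == 0x800b8402
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue) // 0 means all 8 bytes are addressable
         if ((0x602010 & 7) + 4 - 1 > ShadowValue) // i.e. 3 > ShadowValue
           __asan_report_load4(0x602010);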
99 A call to __asan_init_vN() is inserted into the list of module CTORs.
100 N is the version number of the AddressSanitizer API. The changes between the
101 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
102
103 The run-time library redefines malloc (so that redzones are inserted around
104 the allocated memory) and free (so that reuse of freed memory is delayed),
105 and provides the __asan_report* and __asan_init_vN functions.
106
107 Read more:
108 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
109
110 The current implementation supports detection of out-of-bounds and
111 use-after-free in the heap, on the stack and for global variables.
112
113 [Protection of stack variables]
114
115 To understand how detection of out-of-bounds and use-after-free works
116 for stack variables, let's look at this example on x86_64 where the
117 stack grows downward:
118
119 int
120 foo ()
121 {
122 char a[23] = {0};
123 int b[2] = {0};
124
125 a[5] = 1;
126 b[1] = 2;
127
128 return a[5] + b[1];
129 }
130
131 For this function, the stack protected by asan will be organized as
132 follows, from the top of the stack to the bottom:
133
134 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
135
136 Slot 2/ [8 bytes of red zone, added to the space of 'a' to make
137 the next slot 32-byte aligned; this one is called a Partial
138 Redzone; the 32-byte alignment is an asan constraint]
139
140 Slot 3/ [24 bytes for variable 'a']
141
142 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
143
144 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
145
146 Slot 6/ [8 bytes for variable 'b']
147
148 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
149 'LEFT RedZone']
150
151 The 32 bytes of LEFT red zone at the bottom of the stack can be
152 decomposed as such:
153
154 1/ The first 8 bytes contain a magical asan number that is always
155 0x41B58AB3.
156
157 2/ The following 8 bytes contain a pointer to a string (to be
158 parsed at run time by the asan run-time library), whose format
159 is the following:
160
161 "<function-name> <space> <num-of-variables-on-the-stack>
162 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
163 <length-of-var-in-bytes> ){n} "
164
165 where '(...){n}' means the content inside the parentheses occurs 'n'
166 times, with 'n' being the number of variables on the stack.
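
     For the function foo above, this string could for instance be
     "2 96 24 1 a 32 8 1 b ", i.e. two variables: 'a' at 32-byte-aligned
     frame offset 96 with length 24 and 1-character name "a", and 'b'
     at offset 32 with length 8 and name "b" (the exact offsets and the
     order of the records depend on the frame layout actually chosen).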
167
168 3/ The following 8 bytes contain the PC of the current function which
169 will be used by the run-time library to print an error message.
170
171 4/ The following 8 bytes are reserved for internal use by the run-time.
172
173 The shadow memory for that stack layout is going to look like this:
174
175 - content of the 4 shadow memory bytes for slot 7: 0xF1F1F1F1.
176 The F1 byte pattern is a magic number called
177 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
178 the memory for that shadow byte is part of the LEFT red zone
179 intended to sit at the bottom of the variables on the stack.
180
181 - content of the 4 shadow memory bytes for slots 6 and 5:
182 0xF4F4F400.  The F4 byte pattern is a magic number
183 called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
184 memory region for this shadow byte is a PARTIAL red zone
185 intended to pad a variable A, so that the slot following
186 {A,padding} is 32-byte aligned.
187
188 Note that the least significant byte of this shadow memory
189 content being 00 means that 8 bytes of its corresponding
190 memory (which corresponds to the memory of variable 'b')
191 are addressable.
192
193 - content of the 4 shadow memory bytes for slot 4: 0xF2F2F2F2.
194 The F2 byte pattern is a magic number called
195 ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
196 region for this shadow byte is a MIDDLE red zone intended to
197 sit between two 32-byte aligned slots of {variable,padding}.
198
199 - content of the 4 shadow memory bytes for slots 3 and 2:
200 0xF4000000.  This is the concatenation of the shadow bytes for
201 variable 'a' and the partial red zone following it, like what we
202 had for variable 'b'.  The least significant 3 bytes being 00
203 means that the 24 bytes of variable 'a' are addressable.
204
205 - content of the 4 shadow memory bytes for slot 1: 0xF3F3F3F3.
206 The F3 byte pattern is a magic number called
207 ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
208 region for this shadow byte is a RIGHT red zone intended to sit
209 at the top of the variables on the stack.
210
211 Note that the real variable layout is done in expand_used_vars in
212 cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
213 stack variables as well as the different red zones, emits some
214 prologue code to populate the shadow memory so as to poison (mark as
215 non-accessible) the regions of the red zones and mark the regions of
216 stack variables as accessible, and emits some epilogue code to
217 un-poison (mark as accessible) the regions of red zones right before
218 the function exits.
219
220 [Protection of global variables]
221
222 The basic idea is to insert a red zone between two global variables
223 and install a constructor function that calls the asan runtime to
224 populate the relevant shadow memory regions at load time.
225
226 So the global variables are laid out so as to insert a red zone between
227 them.  The red zones are sized so that each variable starts on a
228 32-byte boundary.
229
230 Then a constructor function is installed so that, for each global
231 variable, it calls the runtime asan library function
232 __asan_register_globals with an instance of this type:
233
234 struct __asan_global
235 {
236 // Address of the beginning of the global variable.
237 const void *__beg;
238
239 // Initial size of the global variable.
240 uptr __size;
241
242 // Size of the global variable + size of the red zone. This
243 // size is 32 bytes aligned.
244 uptr __size_with_redzone;
245
246 // Name of the global variable.
247 const void *__name;
248
249 // Name of the module where the global variable is declared.
250 const void *__module_name;
251
252 // 1 if it has dynamic initialization, 0 otherwise.
253 uptr __has_dynamic_init;
254
255 // A pointer to a struct that contains the source location; can be NULL.
256 __asan_global_source_location *__location;
257 }
258
259 A destructor function that calls the runtime asan library function
260 __asan_unregister_globals is also installed. */
261
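/* As a conceptual sketch (not the exact trees this file builds), for a
   TU with a single protected global 'g' the generated code is
   equivalent to:

     static struct __asan_global g_desc
       = { &g, sizeof g, size_with_redzone, "g", "file.c", 0, NULL };
     static void asan_ctor (void) { __asan_register_globals (&g_desc, 1); }
     static void asan_dtor (void) { __asan_unregister_globals (&g_desc, 1); }

   with asan_ctor/asan_dtor added to the module's constructor and
   destructor lists; 'size_with_redzone' and the names shown are
   illustrative.  */
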
262 static unsigned HOST_WIDE_INT asan_shadow_offset_value;
263 static bool asan_shadow_offset_computed;
264 static vec<char *> sanitized_sections;
265
266 /* Sets shadow offset to value in string VAL. */
267
268 bool
269 set_asan_shadow_offset (const char *val)
270 {
271 char *endp;
272
273 errno = 0;
274 #ifdef HAVE_LONG_LONG
275 asan_shadow_offset_value = strtoull (val, &endp, 0);
276 #else
277 asan_shadow_offset_value = strtoul (val, &endp, 0);
278 #endif
279 if (!(*val != '\0' && *endp == '\0' && errno == 0))
280 return false;
281
282 asan_shadow_offset_computed = true;
283
284 return true;
285 }
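
/* Note: this is the value parser behind the -fasan-shadow-offset=
   option; e.g. -fasan-shadow-offset=0x7fff8000 arrives here with VAL
   being "0x7fff8000".  The option plumbing that calls it lives outside
   this file.  */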
286
287 /* Set list of user-defined sections that need to be sanitized. */
288
289 void
290 set_sanitized_sections (const char *sections)
291 {
292 char *pat;
293 unsigned i;
294 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
295 free (pat);
296 sanitized_sections.truncate (0);
297
298 for (const char *s = sections; *s; )
299 {
300 const char *end;
301 for (end = s; *end && *end != ','; ++end);
302 size_t len = end - s;
303 sanitized_sections.safe_push (xstrndup (s, len));
304 s = *end ? end + 1 : end;
305 }
306 }
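
/* SECTIONS comes from the -fsanitize-sections= option, e.g.
   -fsanitize-sections=.mysec,.data.* stores the patterns ".mysec" and
   ".data.*"; they are later matched with fnmatch in section_sanitized_p,
   so shell-style wildcards are honored.  */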
307
308 /* Checks whether section SEC should be sanitized. */
309
310 static bool
311 section_sanitized_p (const char *sec)
312 {
313 char *pat;
314 unsigned i;
315 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
316 if (fnmatch (pat, sec, FNM_PERIOD) == 0)
317 return true;
318 return false;
319 }
320
321 /* Returns Asan shadow offset. */
322
323 static unsigned HOST_WIDE_INT
324 asan_shadow_offset ()
325 {
326 if (!asan_shadow_offset_computed)
327 {
328 asan_shadow_offset_computed = true;
329 asan_shadow_offset_value = targetm.asan_shadow_offset ();
330 }
331 return asan_shadow_offset_value;
332 }
333
334 alias_set_type asan_shadow_set = -1;
335
336 /* Pointer types to 1- and 2-byte integers in shadow memory.  A separate
337 alias set is used for all shadow memory accesses. */
338 static GTY(()) tree shadow_ptr_types[2];
339
340 /* Decl for __asan_option_detect_stack_use_after_return. */
341 static GTY(()) tree asan_detect_stack_use_after_return;
342
343 /* Various flags for Asan builtins. */
344 enum asan_check_flags
345 {
346 ASAN_CHECK_STORE = 1 << 0,
347 ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
348 ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
349 ASAN_CHECK_LAST = 1 << 3
350 };
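
/* For example, build_check_stmt below encodes a scalar store of known
   non-zero length as ASAN_CHECK_STORE | ASAN_CHECK_SCALAR_ACCESS
   | ASAN_CHECK_NON_ZERO_LEN, i.e. the constant 7.  */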
351
352 /* Hashtable support for memory references used by gimple
353 statements. */
354
355 /* This type represents a reference to a memory region. */
356 struct asan_mem_ref
357 {
358 /* The expression of the beginning of the memory region. */
359 tree start;
360
361 /* The size of the access. */
362 HOST_WIDE_INT access_size;
363
364 /* Pool allocation new operator. */
365 inline void *operator new (size_t)
366 {
367 return pool.allocate ();
368 }
369
370 /* Delete operator utilizing pool allocation. */
371 inline void operator delete (void *ptr)
372 {
373 pool.remove ((asan_mem_ref *) ptr);
374 }
375
376 /* Memory allocation pool. */
377 static pool_allocator<asan_mem_ref> pool;
378 };
379
380 pool_allocator<asan_mem_ref> asan_mem_ref::pool ("asan_mem_ref", 10);
381
382 /* Initializes an instance of asan_mem_ref. */
383
384 static void
385 asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
386 {
387 ref->start = start;
388 ref->access_size = access_size;
389 }
390
391 /* Allocates memory for an instance of asan_mem_ref from the class's
392 memory pool (asan_mem_ref::pool) and initializes it.
393 START is the address of (or the expression pointing to) the
394 beginning of memory reference. ACCESS_SIZE is the size of the
395 access to the referenced memory. */
396
397 static asan_mem_ref*
398 asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
399 {
400 asan_mem_ref *ref = new asan_mem_ref;
401
402 asan_mem_ref_init (ref, start, access_size);
403 return ref;
404 }
405
406 /* This builds and returns a pointer to the end of the memory region
407 that starts at START and has length LEN. */
408
409 tree
410 asan_mem_ref_get_end (tree start, tree len)
411 {
412 if (len == NULL_TREE || integer_zerop (len))
413 return start;
414
415 if (!ptrofftype_p (len))
416 len = convert_to_ptrofftype (len);
417
418 return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
419 }
420
421 /* Return a tree expression that represents the end of the referenced
422 memory region. Beware that this function can actually build a new
423 tree expression. */
424
425 tree
426 asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
427 {
428 return asan_mem_ref_get_end (ref->start, len);
429 }
430
431 struct asan_mem_ref_hasher
432 : typed_noop_remove <asan_mem_ref>
433 {
434 typedef asan_mem_ref *value_type;
435 typedef asan_mem_ref *compare_type;
436
437 static inline hashval_t hash (const asan_mem_ref *);
438 static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
439 };
440
441 /* Hash a memory reference. */
442
443 inline hashval_t
444 asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
445 {
446 return iterative_hash_expr (mem_ref->start, 0);
447 }
448
449 /* Compare two memory references.  Only the start addresses are
450 compared; the recorded access sizes are deliberately ignored. */
451
452 inline bool
453 asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
454 const asan_mem_ref *m2)
455 {
456 return operand_equal_p (m1->start, m2->start, 0);
457 }
458
459 static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
460
461 /* Returns a reference to the hash table containing memory references.
462 This function ensures that the hash table is created. Note that
463 this hash table is updated by the function
464 update_mem_ref_hash_table. */
465
466 static hash_table<asan_mem_ref_hasher> *
467 get_mem_ref_hash_table ()
468 {
469 if (!asan_mem_ref_ht)
470 asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
471
472 return asan_mem_ref_ht;
473 }
474
475 /* Clear all entries from the memory references hash table. */
476
477 static void
478 empty_mem_ref_hash_table ()
479 {
480 if (asan_mem_ref_ht)
481 asan_mem_ref_ht->empty ();
482 }
483
484 /* Free the memory references hash table. */
485
486 static void
487 free_mem_ref_resources ()
488 {
489 delete asan_mem_ref_ht;
490 asan_mem_ref_ht = NULL;
491
492 asan_mem_ref::pool.release ();
493 }
494
495 /* Return true iff the memory reference REF has been instrumented. */
496
497 static bool
498 has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
499 {
500 asan_mem_ref r;
501 asan_mem_ref_init (&r, ref, access_size);
502
503 asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
504 return saved_ref && saved_ref->access_size >= access_size;
505 }
506
507 /* Return true iff the memory reference REF has been instrumented. */
508
509 static bool
510 has_mem_ref_been_instrumented (const asan_mem_ref *ref)
511 {
512 return has_mem_ref_been_instrumented (ref->start, ref->access_size);
513 }
514
515 /* Return true iff access to memory region starting at REF and of
516 length LEN has been instrumented. */
517
518 static bool
519 has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
520 {
521 HOST_WIDE_INT size_in_bytes
522 = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
523
524 return size_in_bytes != -1
525 && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
526 }
527
528 /* Set REF to the memory reference present in a gimple assignment
529 ASSIGNMENT. Return true upon successful completion, false
530 otherwise. */
531
532 static bool
533 get_mem_ref_of_assignment (const gassign *assignment,
534 asan_mem_ref *ref,
535 bool *ref_is_store)
536 {
537 gcc_assert (gimple_assign_single_p (assignment));
538
539 if (gimple_store_p (assignment)
540 && !gimple_clobber_p (assignment))
541 {
542 ref->start = gimple_assign_lhs (assignment);
543 *ref_is_store = true;
544 }
545 else if (gimple_assign_load_p (assignment))
546 {
547 ref->start = gimple_assign_rhs1 (assignment);
548 *ref_is_store = false;
549 }
550 else
551 return false;
552
553 ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
554 return true;
555 }
556
557 /* Return the memory references contained in a gimple statement
558 representing a builtin call that has to do with memory access. */
559
560 static bool
561 get_mem_refs_of_builtin_call (const gcall *call,
562 asan_mem_ref *src0,
563 tree *src0_len,
564 bool *src0_is_store,
565 asan_mem_ref *src1,
566 tree *src1_len,
567 bool *src1_is_store,
568 asan_mem_ref *dst,
569 tree *dst_len,
570 bool *dst_is_store,
571 bool *dest_is_deref,
572 bool *intercepted_p)
573 {
574 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
575
576 tree callee = gimple_call_fndecl (call);
577 tree source0 = NULL_TREE, source1 = NULL_TREE,
578 dest = NULL_TREE, len = NULL_TREE;
579 bool is_store = true, got_reference_p = false;
580 HOST_WIDE_INT access_size = 1;
581
582 *intercepted_p = asan_intercepted_p (DECL_FUNCTION_CODE (callee));
583
584 switch (DECL_FUNCTION_CODE (callee))
585 {
586 /* (s, s, n) style memops. */
587 case BUILT_IN_BCMP:
588 case BUILT_IN_MEMCMP:
589 source0 = gimple_call_arg (call, 0);
590 source1 = gimple_call_arg (call, 1);
591 len = gimple_call_arg (call, 2);
592 break;
593
594 /* (src, dest, n) style memops. */
595 case BUILT_IN_BCOPY:
596 source0 = gimple_call_arg (call, 0);
597 dest = gimple_call_arg (call, 1);
598 len = gimple_call_arg (call, 2);
599 break;
600
601 /* (dest, src, n) style memops. */
602 case BUILT_IN_MEMCPY:
603 case BUILT_IN_MEMCPY_CHK:
604 case BUILT_IN_MEMMOVE:
605 case BUILT_IN_MEMMOVE_CHK:
606 case BUILT_IN_MEMPCPY:
607 case BUILT_IN_MEMPCPY_CHK:
608 dest = gimple_call_arg (call, 0);
609 source0 = gimple_call_arg (call, 1);
610 len = gimple_call_arg (call, 2);
611 break;
612
613 /* (dest, n) style memops. */
614 case BUILT_IN_BZERO:
615 dest = gimple_call_arg (call, 0);
616 len = gimple_call_arg (call, 1);
617 break;
618
619 /* (dest, x, n) style memops.  */
620 case BUILT_IN_MEMSET:
621 case BUILT_IN_MEMSET_CHK:
622 dest = gimple_call_arg (call, 0);
623 len = gimple_call_arg (call, 2);
624 break;
625
626 case BUILT_IN_STRLEN:
627 source0 = gimple_call_arg (call, 0);
628 len = gimple_call_lhs (call);
629 break;
630
631 /* And now the __atomic* and __sync builtins.
632 These are handled differently from the classical memory
633 access builtins above. */
634
635 case BUILT_IN_ATOMIC_LOAD_1:
636 case BUILT_IN_ATOMIC_LOAD_2:
637 case BUILT_IN_ATOMIC_LOAD_4:
638 case BUILT_IN_ATOMIC_LOAD_8:
639 case BUILT_IN_ATOMIC_LOAD_16:
640 is_store = false;
641 /* fall through. */
642
643 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
644 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
645 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
646 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
647 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
648
649 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
650 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
651 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
652 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
653 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
654
655 case BUILT_IN_SYNC_FETCH_AND_OR_1:
656 case BUILT_IN_SYNC_FETCH_AND_OR_2:
657 case BUILT_IN_SYNC_FETCH_AND_OR_4:
658 case BUILT_IN_SYNC_FETCH_AND_OR_8:
659 case BUILT_IN_SYNC_FETCH_AND_OR_16:
660
661 case BUILT_IN_SYNC_FETCH_AND_AND_1:
662 case BUILT_IN_SYNC_FETCH_AND_AND_2:
663 case BUILT_IN_SYNC_FETCH_AND_AND_4:
664 case BUILT_IN_SYNC_FETCH_AND_AND_8:
665 case BUILT_IN_SYNC_FETCH_AND_AND_16:
666
667 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
668 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
669 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
670 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
671 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
672
673 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
674 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
675 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
676 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
677
678 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
679 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
680 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
681 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
682 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
683
684 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
685 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
686 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
687 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
688 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
689
690 case BUILT_IN_SYNC_OR_AND_FETCH_1:
691 case BUILT_IN_SYNC_OR_AND_FETCH_2:
692 case BUILT_IN_SYNC_OR_AND_FETCH_4:
693 case BUILT_IN_SYNC_OR_AND_FETCH_8:
694 case BUILT_IN_SYNC_OR_AND_FETCH_16:
695
696 case BUILT_IN_SYNC_AND_AND_FETCH_1:
697 case BUILT_IN_SYNC_AND_AND_FETCH_2:
698 case BUILT_IN_SYNC_AND_AND_FETCH_4:
699 case BUILT_IN_SYNC_AND_AND_FETCH_8:
700 case BUILT_IN_SYNC_AND_AND_FETCH_16:
701
702 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
703 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
704 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
705 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
706 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
707
708 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
709 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
710 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
711 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
712
713 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
714 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
715 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
716 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
717 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
718
719 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
720 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
721 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
722 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
723 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
724
725 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
726 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
727 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
728 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
729 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
730
731 case BUILT_IN_SYNC_LOCK_RELEASE_1:
732 case BUILT_IN_SYNC_LOCK_RELEASE_2:
733 case BUILT_IN_SYNC_LOCK_RELEASE_4:
734 case BUILT_IN_SYNC_LOCK_RELEASE_8:
735 case BUILT_IN_SYNC_LOCK_RELEASE_16:
736
737 case BUILT_IN_ATOMIC_EXCHANGE_1:
738 case BUILT_IN_ATOMIC_EXCHANGE_2:
739 case BUILT_IN_ATOMIC_EXCHANGE_4:
740 case BUILT_IN_ATOMIC_EXCHANGE_8:
741 case BUILT_IN_ATOMIC_EXCHANGE_16:
742
743 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
744 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
745 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
746 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
747 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
748
749 case BUILT_IN_ATOMIC_STORE_1:
750 case BUILT_IN_ATOMIC_STORE_2:
751 case BUILT_IN_ATOMIC_STORE_4:
752 case BUILT_IN_ATOMIC_STORE_8:
753 case BUILT_IN_ATOMIC_STORE_16:
754
755 case BUILT_IN_ATOMIC_ADD_FETCH_1:
756 case BUILT_IN_ATOMIC_ADD_FETCH_2:
757 case BUILT_IN_ATOMIC_ADD_FETCH_4:
758 case BUILT_IN_ATOMIC_ADD_FETCH_8:
759 case BUILT_IN_ATOMIC_ADD_FETCH_16:
760
761 case BUILT_IN_ATOMIC_SUB_FETCH_1:
762 case BUILT_IN_ATOMIC_SUB_FETCH_2:
763 case BUILT_IN_ATOMIC_SUB_FETCH_4:
764 case BUILT_IN_ATOMIC_SUB_FETCH_8:
765 case BUILT_IN_ATOMIC_SUB_FETCH_16:
766
767 case BUILT_IN_ATOMIC_AND_FETCH_1:
768 case BUILT_IN_ATOMIC_AND_FETCH_2:
769 case BUILT_IN_ATOMIC_AND_FETCH_4:
770 case BUILT_IN_ATOMIC_AND_FETCH_8:
771 case BUILT_IN_ATOMIC_AND_FETCH_16:
772
773 case BUILT_IN_ATOMIC_NAND_FETCH_1:
774 case BUILT_IN_ATOMIC_NAND_FETCH_2:
775 case BUILT_IN_ATOMIC_NAND_FETCH_4:
776 case BUILT_IN_ATOMIC_NAND_FETCH_8:
777 case BUILT_IN_ATOMIC_NAND_FETCH_16:
778
779 case BUILT_IN_ATOMIC_XOR_FETCH_1:
780 case BUILT_IN_ATOMIC_XOR_FETCH_2:
781 case BUILT_IN_ATOMIC_XOR_FETCH_4:
782 case BUILT_IN_ATOMIC_XOR_FETCH_8:
783 case BUILT_IN_ATOMIC_XOR_FETCH_16:
784
785 case BUILT_IN_ATOMIC_OR_FETCH_1:
786 case BUILT_IN_ATOMIC_OR_FETCH_2:
787 case BUILT_IN_ATOMIC_OR_FETCH_4:
788 case BUILT_IN_ATOMIC_OR_FETCH_8:
789 case BUILT_IN_ATOMIC_OR_FETCH_16:
790
791 case BUILT_IN_ATOMIC_FETCH_ADD_1:
792 case BUILT_IN_ATOMIC_FETCH_ADD_2:
793 case BUILT_IN_ATOMIC_FETCH_ADD_4:
794 case BUILT_IN_ATOMIC_FETCH_ADD_8:
795 case BUILT_IN_ATOMIC_FETCH_ADD_16:
796
797 case BUILT_IN_ATOMIC_FETCH_SUB_1:
798 case BUILT_IN_ATOMIC_FETCH_SUB_2:
799 case BUILT_IN_ATOMIC_FETCH_SUB_4:
800 case BUILT_IN_ATOMIC_FETCH_SUB_8:
801 case BUILT_IN_ATOMIC_FETCH_SUB_16:
802
803 case BUILT_IN_ATOMIC_FETCH_AND_1:
804 case BUILT_IN_ATOMIC_FETCH_AND_2:
805 case BUILT_IN_ATOMIC_FETCH_AND_4:
806 case BUILT_IN_ATOMIC_FETCH_AND_8:
807 case BUILT_IN_ATOMIC_FETCH_AND_16:
808
809 case BUILT_IN_ATOMIC_FETCH_NAND_1:
810 case BUILT_IN_ATOMIC_FETCH_NAND_2:
811 case BUILT_IN_ATOMIC_FETCH_NAND_4:
812 case BUILT_IN_ATOMIC_FETCH_NAND_8:
813 case BUILT_IN_ATOMIC_FETCH_NAND_16:
814
815 case BUILT_IN_ATOMIC_FETCH_XOR_1:
816 case BUILT_IN_ATOMIC_FETCH_XOR_2:
817 case BUILT_IN_ATOMIC_FETCH_XOR_4:
818 case BUILT_IN_ATOMIC_FETCH_XOR_8:
819 case BUILT_IN_ATOMIC_FETCH_XOR_16:
820
821 case BUILT_IN_ATOMIC_FETCH_OR_1:
822 case BUILT_IN_ATOMIC_FETCH_OR_2:
823 case BUILT_IN_ATOMIC_FETCH_OR_4:
824 case BUILT_IN_ATOMIC_FETCH_OR_8:
825 case BUILT_IN_ATOMIC_FETCH_OR_16:
826 {
827 dest = gimple_call_arg (call, 0);
828 /* DEST represents the address of a memory location.
829 instrument_derefs wants the memory location, so let's
830 dereference the address DEST before handing it to
831 instrument_derefs. */
832 if (TREE_CODE (dest) == ADDR_EXPR)
833 dest = TREE_OPERAND (dest, 0);
834 else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
835 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
836 dest, build_int_cst (TREE_TYPE (dest), 0));
837 else
838 gcc_unreachable ();
839
840 access_size = int_size_in_bytes (TREE_TYPE (dest));
841 }
842
843 default:
844 /* The other memory access builtins are not instrumented in this
845 function because they either don't have any length parameter,
846 or their length parameter is just a limit. */
847 break;
848 }
849
850 if (len != NULL_TREE)
851 {
852 if (source0 != NULL_TREE)
853 {
854 src0->start = source0;
855 src0->access_size = access_size;
856 *src0_len = len;
857 *src0_is_store = false;
858 }
859
860 if (source1 != NULL_TREE)
861 {
862 src1->start = source1;
863 src1->access_size = access_size;
864 *src1_len = len;
865 *src1_is_store = false;
866 }
867
868 if (dest != NULL_TREE)
869 {
870 dst->start = dest;
871 dst->access_size = access_size;
872 *dst_len = len;
873 *dst_is_store = true;
874 }
875
876 got_reference_p = true;
877 }
878 else if (dest)
879 {
880 dst->start = dest;
881 dst->access_size = access_size;
882 *dst_len = NULL_TREE;
883 *dst_is_store = is_store;
884 *dest_is_deref = true;
885 got_reference_p = true;
886 }
887
888 return got_reference_p;
889 }
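
/* For example, 'memcpy (d, s, n)' is decomposed by the function above
   into DST = (d, n, store) and SRC0 = (s, n, load), whereas
   '__atomic_load_8 (p, order)' dereferences P and reports a single
   8-byte load with *DEST_IS_DEREF set.  */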
890
891 /* Return true iff a given gimple statement has been instrumented.
892 Note that the statement is "defined" by the memory references it
893 contains. */
894
895 static bool
896 has_stmt_been_instrumented_p (gimple stmt)
897 {
898 if (gimple_assign_single_p (stmt))
899 {
900 bool r_is_store;
901 asan_mem_ref r;
902 asan_mem_ref_init (&r, NULL, 1);
903
904 if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
905 &r_is_store))
906 return has_mem_ref_been_instrumented (&r);
907 }
908 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
909 {
910 asan_mem_ref src0, src1, dest;
911 asan_mem_ref_init (&src0, NULL, 1);
912 asan_mem_ref_init (&src1, NULL, 1);
913 asan_mem_ref_init (&dest, NULL, 1);
914
915 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
916 bool src0_is_store = false, src1_is_store = false,
917 dest_is_store = false, dest_is_deref = false, intercepted_p = true;
918 if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
919 &src0, &src0_len, &src0_is_store,
920 &src1, &src1_len, &src1_is_store,
921 &dest, &dest_len, &dest_is_store,
922 &dest_is_deref, &intercepted_p))
923 {
924 if (src0.start != NULL_TREE
925 && !has_mem_ref_been_instrumented (&src0, src0_len))
926 return false;
927
928 if (src1.start != NULL_TREE
929 && !has_mem_ref_been_instrumented (&src1, src1_len))
930 return false;
931
932 if (dest.start != NULL_TREE
933 && !has_mem_ref_been_instrumented (&dest, dest_len))
934 return false;
935
936 return true;
937 }
938 }
939 return false;
940 }
941
942 /* Insert a memory reference into the hash table. */
943
944 static void
945 update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
946 {
947 hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
948
949 asan_mem_ref r;
950 asan_mem_ref_init (&r, ref, access_size);
951
952 asan_mem_ref **slot = ht->find_slot (&r, INSERT);
953 if (*slot == NULL || (*slot)->access_size < access_size)
954 *slot = asan_mem_ref_new (ref, access_size);
955 }
956
957 /* Initialize shadow_ptr_types array. */
958
959 static void
960 asan_init_shadow_ptr_types (void)
961 {
962 asan_shadow_set = new_alias_set ();
963 shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
964 TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
965 shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
966 shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
967 TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
968 shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
969 initialize_sanitizer_builtins ();
970 }
971
972 /* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */
973
974 static tree
975 asan_pp_string (pretty_printer *pp)
976 {
977 const char *buf = pp_formatted_text (pp);
978 size_t len = strlen (buf);
979 tree ret = build_string (len + 1, buf);
980 TREE_TYPE (ret)
981 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
982 build_index_type (size_int (len)));
983 TREE_READONLY (ret) = 1;
984 TREE_STATIC (ret) = 1;
985 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
986 }
987
988 /* Return a CONST_INT representing 4 consecutive shadow memory bytes. */
989
990 static rtx
991 asan_shadow_cst (unsigned char shadow_bytes[4])
992 {
993 int i;
994 unsigned HOST_WIDE_INT val = 0;
995 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
996 for (i = 0; i < 4; i++)
997 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
998 << (BITS_PER_UNIT * i);
999 return gen_int_mode (val, SImode);
1000 }
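
/* E.g. shadow_bytes = { 0x00, 0x00, 0x00, 0xf1 } yields 0xf1000000:
   byte I is placed at bit position BITS_PER_UNIT * I, with the byte
   order reversed on big-endian targets.  */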
1001
1002 /* Clear shadow memory at SHADOW_MEM, LEN bytes.  We can't use a library
1003 call here, though. */
1004
1005 static void
1006 asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
1007 {
1008 rtx_insn *insn, *insns, *jump;
1009 rtx_code_label *top_label;
1010 rtx end, addr, tmp;
1011
1012 start_sequence ();
1013 clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
1014 insns = get_insns ();
1015 end_sequence ();
1016 for (insn = insns; insn; insn = NEXT_INSN (insn))
1017 if (CALL_P (insn))
1018 break;
1019 if (insn == NULL_RTX)
1020 {
1021 emit_insn (insns);
1022 return;
1023 }
1024
1025 gcc_assert ((len & 3) == 0);
1026 top_label = gen_label_rtx ();
1027 addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
1028 shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
1029 end = force_reg (Pmode, plus_constant (Pmode, addr, len));
1030 emit_label (top_label);
1031
1032 emit_move_insn (shadow_mem, const0_rtx);
1033 tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
1034 true, OPTAB_LIB_WIDEN);
1035 if (tmp != addr)
1036 emit_move_insn (addr, tmp);
1037 emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
1038 jump = get_last_insn ();
1039 gcc_assert (JUMP_P (jump));
1040 add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
1041 }
1042
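/* Emit the "LASANPC" label at the start of the current function; its
   address is stored into the stack frame description built in
   asan_emit_stack_protection below, so that the run-time library can
   report which function a bad stack access happened in.  */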
1043 void
1044 asan_function_start (void)
1045 {
1046 section *fnsec = function_section (current_function_decl);
1047 switch_to_section (fnsec);
1048 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
1049 current_function_funcdef_no);
1050 }
1051
1052 /* Insert code to protect stack vars. The prologue sequence should be emitted
1053 directly, epilogue sequence returned. BASE is the register holding the
1054 stack base, relative to which the OFFSETS array offsets are; the OFFSETS
1055 array contains pairs of offsets in reverse order, always the end offset
1056 of some gap that needs protection followed by starting offset,
1057 and DECLS is an array of representative decls for each var partition.
1058 LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
1059 elements long (OFFSETS include gap before the first variable as well
1060 as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
1061 register which stack vars DECL_RTLs are based on. Either BASE should be
1062 assigned to PBASE, when not doing use after return protection, or
1063 corresponding address based on __asan_stack_malloc* return value. */
1064
1065 rtx_insn *
1066 asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
1067 HOST_WIDE_INT *offsets, tree *decls, int length)
1068 {
1069 rtx shadow_base, shadow_mem, ret, mem, orig_base;
1070 rtx_code_label *lab;
1071 rtx_insn *insns;
1072 char buf[30];
1073 unsigned char shadow_bytes[4];
1074 HOST_WIDE_INT base_offset = offsets[length - 1];
1075 HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
1076 HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
1077 HOST_WIDE_INT last_offset, last_size;
1078 int l;
1079 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
1080 tree str_cst, decl, id;
1081 int use_after_return_class = -1;
1082
1083 if (shadow_ptr_types[0] == NULL_TREE)
1084 asan_init_shadow_ptr_types ();
1085
1086 /* First of all, prepare the description string. */
1087 pretty_printer asan_pp;
1088
1089 pp_decimal_int (&asan_pp, length / 2 - 1);
1090 pp_space (&asan_pp);
1091 for (l = length - 2; l; l -= 2)
1092 {
1093 tree decl = decls[l / 2 - 1];
1094 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
1095 pp_space (&asan_pp);
1096 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
1097 pp_space (&asan_pp);
1098 if (DECL_P (decl) && DECL_NAME (decl))
1099 {
1100 pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
1101 pp_space (&asan_pp);
1102 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
1103 }
1104 else
1105 pp_string (&asan_pp, "9 <unknown>");
1106 pp_space (&asan_pp);
1107 }
1108 str_cst = asan_pp_string (&asan_pp);
1109
1110 /* Emit the prologue sequence. */
1111 if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
1112 && ASAN_USE_AFTER_RETURN)
1113 {
1114 use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
1115 /* __asan_stack_malloc_N guarantees alignment
1116 N < 6 ? (64 << N) : 4096 bytes. */
1117 if (alignb > (use_after_return_class < 6
1118 ? (64U << use_after_return_class) : 4096U))
1119 use_after_return_class = -1;
1120 else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
1121 base_align_bias = ((asan_frame_size + alignb - 1)
1122 & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
1123 }
1124 /* Align base if target is STRICT_ALIGNMENT. */
1125 if (STRICT_ALIGNMENT)
1126 base = expand_binop (Pmode, and_optab, base,
1127 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
1128 << ASAN_SHADOW_SHIFT)
1129 / BITS_PER_UNIT), Pmode), NULL_RTX,
1130 1, OPTAB_DIRECT);
1131
1132 if (use_after_return_class == -1 && pbase)
1133 emit_move_insn (pbase, base);
1134
1135 base = expand_binop (Pmode, add_optab, base,
1136 gen_int_mode (base_offset - base_align_bias, Pmode),
1137 NULL_RTX, 1, OPTAB_DIRECT);
1138 orig_base = NULL_RTX;
1139 if (use_after_return_class != -1)
1140 {
1141 if (asan_detect_stack_use_after_return == NULL_TREE)
1142 {
1143 id = get_identifier ("__asan_option_detect_stack_use_after_return");
1144 decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1145 integer_type_node);
1146 SET_DECL_ASSEMBLER_NAME (decl, id);
1147 TREE_ADDRESSABLE (decl) = 1;
1148 DECL_ARTIFICIAL (decl) = 1;
1149 DECL_IGNORED_P (decl) = 1;
1150 DECL_EXTERNAL (decl) = 1;
1151 TREE_STATIC (decl) = 1;
1152 TREE_PUBLIC (decl) = 1;
1153 TREE_USED (decl) = 1;
1154 asan_detect_stack_use_after_return = decl;
1155 }
1156 orig_base = gen_reg_rtx (Pmode);
1157 emit_move_insn (orig_base, base);
1158 ret = expand_normal (asan_detect_stack_use_after_return);
1159 lab = gen_label_rtx ();
1160 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1161 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1162 VOIDmode, 0, lab, very_likely);
1163 snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1164 use_after_return_class);
1165 ret = init_one_libfunc (buf);
1166 rtx addr = convert_memory_address (ptr_mode, base);
1167 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
1168 GEN_INT (asan_frame_size
1169 + base_align_bias),
1170 TYPE_MODE (pointer_sized_int_node),
1171 addr, ptr_mode);
1172 ret = convert_memory_address (Pmode, ret);
1173 emit_move_insn (base, ret);
1174 emit_label (lab);
1175 emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1176 gen_int_mode (base_align_bias
1177 - base_offset, Pmode),
1178 NULL_RTX, 1, OPTAB_DIRECT));
1179 }
1180 mem = gen_rtx_MEM (ptr_mode, base);
1181 mem = adjust_address (mem, VOIDmode, base_align_bias);
1182 emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
1183 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1184 emit_move_insn (mem, expand_normal (str_cst));
1185 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1186 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1187 id = get_identifier (buf);
1188 decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1189 VAR_DECL, id, char_type_node);
1190 SET_DECL_ASSEMBLER_NAME (decl, id);
1191 TREE_ADDRESSABLE (decl) = 1;
1192 TREE_READONLY (decl) = 1;
1193 DECL_ARTIFICIAL (decl) = 1;
1194 DECL_IGNORED_P (decl) = 1;
1195 TREE_STATIC (decl) = 1;
1196 TREE_PUBLIC (decl) = 0;
1197 TREE_USED (decl) = 1;
1198 DECL_INITIAL (decl) = decl;
1199 TREE_ASM_WRITTEN (decl) = 1;
1200 TREE_ASM_WRITTEN (id) = 1;
1201 emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1202 shadow_base = expand_binop (Pmode, lshr_optab, base,
1203 GEN_INT (ASAN_SHADOW_SHIFT),
1204 NULL_RTX, 1, OPTAB_DIRECT);
1205 shadow_base
1206 = plus_constant (Pmode, shadow_base,
1207 asan_shadow_offset ()
1208 + (base_align_bias >> ASAN_SHADOW_SHIFT));
1209 gcc_assert (asan_shadow_set != -1
1210 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1211 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1212 set_mem_alias_set (shadow_mem, asan_shadow_set);
1213 if (STRICT_ALIGNMENT)
1214 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1215 prev_offset = base_offset;
1216 for (l = length; l; l -= 2)
1217 {
1218 if (l == 2)
1219 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1220 offset = offsets[l - 1];
1221 if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
1222 {
1223 int i;
1224 HOST_WIDE_INT aoff
1225 = base_offset + ((offset - base_offset)
1226 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1227 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1228 (aoff - prev_offset)
1229 >> ASAN_SHADOW_SHIFT);
1230 prev_offset = aoff;
1231 for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
1232 if (aoff < offset)
1233 {
1234 if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
1235 shadow_bytes[i] = 0;
1236 else
1237 shadow_bytes[i] = offset - aoff;
1238 }
1239 else
1240 shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
1241 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1242 offset = aoff;
1243 }
1244 while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
1245 {
1246 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1247 (offset - prev_offset)
1248 >> ASAN_SHADOW_SHIFT);
1249 prev_offset = offset;
1250 memset (shadow_bytes, cur_shadow_byte, 4);
1251 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1252 offset += ASAN_RED_ZONE_SIZE;
1253 }
1254 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1255 }
1256 do_pending_stack_adjust ();
1257
1258 /* Construct epilogue sequence. */
1259 start_sequence ();
1260
1261 lab = NULL;
1262 if (use_after_return_class != -1)
1263 {
1264 rtx_code_label *lab2 = gen_label_rtx ();
1265 char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
1266 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1267 emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
1268 VOIDmode, 0, lab2, very_likely);
1269 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1270 set_mem_alias_set (shadow_mem, asan_shadow_set);
1271 mem = gen_rtx_MEM (ptr_mode, base);
1272 mem = adjust_address (mem, VOIDmode, base_align_bias);
1273 emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
1274 unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
1275 if (use_after_return_class < 5
1276 && can_store_by_pieces (sz, builtin_memset_read_str, &c,
1277 BITS_PER_UNIT, true))
1278 store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
1279 BITS_PER_UNIT, true, 0);
1280 else if (use_after_return_class >= 5
1281 || !set_storage_via_setmem (shadow_mem,
1282 GEN_INT (sz),
1283 gen_int_mode (c, QImode),
1284 BITS_PER_UNIT, BITS_PER_UNIT,
1285 -1, sz, sz, sz))
1286 {
1287 snprintf (buf, sizeof buf, "__asan_stack_free_%d",
1288 use_after_return_class);
1289 ret = init_one_libfunc (buf);
1290 rtx addr = convert_memory_address (ptr_mode, base);
1291 rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
1292 emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
1293 GEN_INT (asan_frame_size + base_align_bias),
1294 TYPE_MODE (pointer_sized_int_node),
1295 orig_addr, ptr_mode);
1296 }
1297 lab = gen_label_rtx ();
1298 emit_jump (lab);
1299 emit_label (lab2);
1300 }
1301
1302 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1303 set_mem_alias_set (shadow_mem, asan_shadow_set);
1304
1305 if (STRICT_ALIGNMENT)
1306 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1307
1308 prev_offset = base_offset;
1309 last_offset = base_offset;
1310 last_size = 0;
1311 for (l = length; l; l -= 2)
1312 {
1313 offset = base_offset + ((offsets[l - 1] - base_offset)
1314 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1315 if (last_offset + last_size != offset)
1316 {
1317 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1318 (last_offset - prev_offset)
1319 >> ASAN_SHADOW_SHIFT);
1320 prev_offset = last_offset;
1321 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1322 last_offset = offset;
1323 last_size = 0;
1324 }
1325 last_size += base_offset + ((offsets[l - 2] - base_offset)
1326 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1327 - offset;
1328 }
1329 if (last_size)
1330 {
1331 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1332 (last_offset - prev_offset)
1333 >> ASAN_SHADOW_SHIFT);
1334 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1335 }
1336
1337 do_pending_stack_adjust ();
1338 if (lab)
1339 emit_label (lab);
1340
1341 insns = get_insns ();
1342 end_sequence ();
1343 return insns;
1344 }
1345
1346 /* Return true if DECL, a global var, might be overridden and therefore
1347 needs a local alias. */
1348
1349 static bool
1350 asan_needs_local_alias (tree decl)
1351 {
1352 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1353 }
1354
1355 /* Return true if DECL is a VAR_DECL that should be protected
1356 by Address Sanitizer, by appending a red zone with protected
1357 shadow memory after it and aligning it to at least
1358 ASAN_RED_ZONE_SIZE bytes. */
1359
1360 bool
1361 asan_protect_global (tree decl)
1362 {
1363 if (!ASAN_GLOBALS)
1364 return false;
1365
1366 rtx rtl, symbol;
1367
1368 if (TREE_CODE (decl) == STRING_CST)
1369 {
1370 /* Instrument all STRING_CSTs except those created
1371 by asan_pp_string here. */
1372 if (shadow_ptr_types[0] != NULL_TREE
1373 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1374 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1375 return false;
1376 return true;
1377 }
1378 if (TREE_CODE (decl) != VAR_DECL
1379 /* TLS vars aren't statically protectable. */
1380 || DECL_THREAD_LOCAL_P (decl)
1381 /* Externs will be protected elsewhere. */
1382 || DECL_EXTERNAL (decl)
1383 || !DECL_RTL_SET_P (decl)
1384 /* Comdat vars pose an ABI problem, we can't know if
1385 the var that is selected by the linker will have
1386 padding or not. */
1387 || DECL_ONE_ONLY (decl)
1388 /* Similarly for common vars. People can use -fno-common.
1389 Note: Linux kernel is built with -fno-common, so we do instrument
1390 globals there even if it is C. */
1391 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
1392 /* Don't protect if using user section, often vars placed
1393 into user section from multiple TUs are then assumed
1394 to be an array of such vars, putting padding in there
1395 breaks this assumption. */
1396 || (DECL_SECTION_NAME (decl) != NULL
1397 && !symtab_node::get (decl)->implicit_section
1398 && !section_sanitized_p (DECL_SECTION_NAME (decl)))
1399 || DECL_SIZE (decl) == 0
1400 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1401 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1402 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
1403 || TREE_TYPE (decl) == ubsan_get_source_location_type ())
1404 return false;
1405
1406 rtl = DECL_RTL (decl);
1407 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1408 return false;
1409 symbol = XEXP (rtl, 0);
1410
1411 if (CONSTANT_POOL_ADDRESS_P (symbol)
1412 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1413 return false;
1414
1415 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1416 return false;
1417
1418 #ifndef ASM_OUTPUT_DEF
1419 if (asan_needs_local_alias (decl))
1420 return false;
1421 #endif
1422
1423 return true;
1424 }
1425
1426 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1427 IS_STORE is either 1 (for a store) or 0 (for a load). */
1428
1429 static tree
1430 report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1431 int *nargs)
1432 {
1433 static enum built_in_function report[2][2][6]
1434 = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1435 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1436 BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1437 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1438 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1439 BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
1440 { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
1441 BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
1442 BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
1443 BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
1444 BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
1445 BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
1446 { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
1447 BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
1448 BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
1449 BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
1450 BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
1451 BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
1452 if (size_in_bytes == -1)
1453 {
1454 *nargs = 2;
1455 return builtin_decl_implicit (report[recover_p][is_store][5]);
1456 }
1457 *nargs = 1;
1458 int size_log2 = exact_log2 (size_in_bytes);
1459 return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
1460 }
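
/* E.g. a known 4-byte access maps to index exact_log2 (4) == 2 above,
   i.e. BUILT_IN_ASAN_REPORT_LOAD4 resp. BUILT_IN_ASAN_REPORT_STORE4,
   while an unknown size (-1) selects the _N variant, which takes the
   access length as a second argument.  */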
1461
1462 /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1463 IS_STORE is either 1 (for a store) or 0 (for a load). */
1464
1465 static tree
1466 check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1467 int *nargs)
1468 {
1469 static enum built_in_function check[2][2][6]
1470 = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
1471 BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
1472 BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
1473 { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
1474 BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
1475 BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
1476 { { BUILT_IN_ASAN_LOAD1_NOABORT,
1477 BUILT_IN_ASAN_LOAD2_NOABORT,
1478 BUILT_IN_ASAN_LOAD4_NOABORT,
1479 BUILT_IN_ASAN_LOAD8_NOABORT,
1480 BUILT_IN_ASAN_LOAD16_NOABORT,
1481 BUILT_IN_ASAN_LOADN_NOABORT },
1482 { BUILT_IN_ASAN_STORE1_NOABORT,
1483 BUILT_IN_ASAN_STORE2_NOABORT,
1484 BUILT_IN_ASAN_STORE4_NOABORT,
1485 BUILT_IN_ASAN_STORE8_NOABORT,
1486 BUILT_IN_ASAN_STORE16_NOABORT,
1487 BUILT_IN_ASAN_STOREN_NOABORT } } };
1488 if (size_in_bytes == -1)
1489 {
1490 *nargs = 2;
1491 return builtin_decl_implicit (check[recover_p][is_store][5]);
1492 }
1493 *nargs = 1;
1494 int size_log2 = exact_log2 (size_in_bytes);
1495 return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
1496 }
1497
1498 /* Split the current basic block and create a condition statement
1499 insertion point right before or after the statement pointed to by
1500 ITER. Return an iterator to the point at which the caller might
1501 safely insert the condition statement.
1502
1503 THEN_BLOCK must be set to the address of an uninitialized instance
1504 of basic_block. The function will then set *THEN_BLOCK to the
1505 'then block' of the condition statement to be inserted by the
1506 caller.
1507
1508 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1509 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1510
1511 Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1512 block' of the condition statement to be inserted by the caller.
1513
1514 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1515 statements starting from *ITER, and *THEN_BLOCK is a new empty
1516 block.
1517
1518 *ITER is adjusted to always point to the first statement
1519 of the basic block *FALLTHROUGH_BLOCK.  That statement is the
1520 same as what ITER was pointing to prior to calling this function,
1521 if BEFORE_P is true; otherwise, it is its following statement. */
1522
1523 gimple_stmt_iterator
1524 create_cond_insert_point (gimple_stmt_iterator *iter,
1525 bool before_p,
1526 bool then_more_likely_p,
1527 bool create_then_fallthru_edge,
1528 basic_block *then_block,
1529 basic_block *fallthrough_block)
1530 {
1531 gimple_stmt_iterator gsi = *iter;
1532
1533 if (!gsi_end_p (gsi) && before_p)
1534 gsi_prev (&gsi);
1535
1536 basic_block cur_bb = gsi_bb (*iter);
1537
1538 edge e = split_block (cur_bb, gsi_stmt (gsi));
1539
1540 /* Get a hold on the 'condition block', the 'then block' and the
1541 'else block'. */
1542 basic_block cond_bb = e->src;
1543 basic_block fallthru_bb = e->dest;
1544 basic_block then_bb = create_empty_bb (cond_bb);
1545 if (current_loops)
1546 {
1547 add_bb_to_loop (then_bb, cond_bb->loop_father);
1548 loops_state_set (LOOPS_NEED_FIXUP);
1549 }
1550
1551 /* Set up the newly created 'then block'. */
1552 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1553 int fallthrough_probability
1554 = then_more_likely_p
1555 ? PROB_VERY_UNLIKELY
1556 : PROB_ALWAYS - PROB_VERY_UNLIKELY;
1557 e->probability = PROB_ALWAYS - fallthrough_probability;
1558 if (create_then_fallthru_edge)
1559 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1560
1561 /* Set up the fallthrough basic block. */
1562 e = find_edge (cond_bb, fallthru_bb);
1563 e->flags = EDGE_FALSE_VALUE;
1564 e->count = cond_bb->count;
1565 e->probability = fallthrough_probability;
1566
1567 /* Update dominance info for the newly created then_bb; note that
1568 fallthru_bb's dominance info has already been updated by
1569 split_block. */
1570 if (dom_info_available_p (CDI_DOMINATORS))
1571 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1572
1573 *then_block = then_bb;
1574 *fallthrough_block = fallthru_bb;
1575 *iter = gsi_start_bb (fallthru_bb);
1576
1577 return gsi_last_bb (cond_bb);
1578 }
1579
1580 /* Insert an if condition followed by a 'then block' right before the
1581 statement pointed to by ITER. The fallthrough block -- which is the
1582 else block of the condition as well as the destination of the
1583 outgoing edge of the 'then block' -- starts with the statement
1584 pointed to by ITER.
1585
1586 COND is the condition of the if.
1587
1588 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1589 'then block' is higher than the probability of the edge to the
1590 fallthrough block.
1591
1592 Upon completion of the function, *THEN_BB is set to the newly
1593 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1594 fallthrough block.
1595
1596 *ITER is adjusted to still point to the same statement it was
1597 pointing to initially. */
1598
1599 static void
1600 insert_if_then_before_iter (gcond *cond,
1601 gimple_stmt_iterator *iter,
1602 bool then_more_likely_p,
1603 basic_block *then_bb,
1604 basic_block *fallthrough_bb)
1605 {
1606 gimple_stmt_iterator cond_insert_point =
1607 create_cond_insert_point (iter,
1608 /*before_p=*/true,
1609 then_more_likely_p,
1610 /*create_then_fallthru_edge=*/true,
1611 then_bb,
1612 fallthrough_bb);
1613 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1614 }
1615
1616 /* Build
1617 (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset (). */
1618
1619 static tree
1620 build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
1621 tree base_addr, tree shadow_ptr_type)
1622 {
1623 tree t, uintptr_type = TREE_TYPE (base_addr);
1624 tree shadow_type = TREE_TYPE (shadow_ptr_type);
1625 gimple g;
1626
1627 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
1628 g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
1629 base_addr, t);
1630 gimple_set_location (g, location);
1631 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1632
1633 t = build_int_cst (uintptr_type, asan_shadow_offset ());
1634 g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
1635 gimple_assign_lhs (g), t);
1636 gimple_set_location (g, location);
1637 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1638
1639 g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
1640 gimple_assign_lhs (g));
1641 gimple_set_location (g, location);
1642 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1643
1644 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1645 build_int_cst (shadow_ptr_type, 0));
1646 g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
1647 gimple_set_location (g, location);
1648 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1649 return gimple_assign_lhs (g);
1650 }
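
/* For a BASE_ADDR in SSA name 'base_1', the gimple emitted above is
   roughly (SSA names illustrative):
     _2 = base_1 >> ASAN_SHADOW_SHIFT;
     _3 = _2 + <asan_shadow_offset>;
     _4 = (shadow_ptr_type) _3;
     _5 = MEM[(shadow_type *) _4];
   and _5, the loaded shadow byte (or half-word), is returned.  */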
1651
1652 /* BASE can already be an SSA_NAME; in that case, do not create a
1653 new SSA_NAME for it. */
1654
1655 static tree
1656 maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
1657 bool before_p)
1658 {
1659 if (TREE_CODE (base) == SSA_NAME)
1660 return base;
1661 gimple g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
1662 TREE_CODE (base), base);
1663 gimple_set_location (g, loc);
1664 if (before_p)
1665 gsi_insert_before (iter, g, GSI_SAME_STMT);
1666 else
1667 gsi_insert_after (iter, g, GSI_NEW_STMT);
1668 return gimple_assign_lhs (g);
1669 }
1670
1671 /* LEN can already have necessary size and precision;
1672 in that case, do not create a new variable. */
1673
1674 tree
1675 maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
1676 bool before_p)
1677 {
1678 if (ptrofftype_p (len))
1679 return len;
1680 gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1681 NOP_EXPR, len);
1682 gimple_set_location (g, loc);
1683 if (before_p)
1684 gsi_insert_before (iter, g, GSI_SAME_STMT);
1685 else
1686 gsi_insert_after (iter, g, GSI_NEW_STMT);
1687 return gimple_assign_lhs (g);
1688 }
1689
1690 /* Instrument the memory access whose address is BASE. Insert new
1691 statements before or after ITER.
1692
1693 Note that the memory access represented by BASE can be either an
1694 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1695 location. IS_STORE is TRUE for a store, FALSE for a load.
1696 BEFORE_P is TRUE for inserting the instrumentation code before
1697 ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
1698 for a scalar memory access and FALSE for a memory region access.
1699 IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have
1700 a non-zero length. ALIGN gives the alignment of the accessed
1701 memory object.
1702
1705 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1706 statement it was pointing to prior to calling this function,
1707 otherwise, it points to the statement logically following it. */
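
/* For example (illustrative only), instrumenting an aligned 4-byte
   store to address P emits the internal call

     .ASAN_CHECK (ASAN_CHECK_STORE | ASAN_CHECK_NON_ZERO_LEN
		  | ASAN_CHECK_SCALAR_ACCESS, P, 4, 4);

   which stays opaque to the optimizers and is only expanded into
   real shadow-memory checks later, by asan_expand_check_ifn. */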
1708
1709 static void
1710 build_check_stmt (location_t loc, tree base, tree len,
1711 HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
1712 bool is_non_zero_len, bool before_p, bool is_store,
1713 bool is_scalar_access, unsigned int align = 0)
1714 {
1715 gimple_stmt_iterator gsi = *iter;
1716 gimple g;
1717
1718 gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
1719
1722 base = unshare_expr (base);
1723 base = maybe_create_ssa_name (loc, base, &gsi, before_p);
1724
1725 if (len)
1726 {
1727 len = unshare_expr (len);
1728 len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
1729 }
1730 else
1731 {
1732 gcc_assert (size_in_bytes != -1);
1733 len = build_int_cst (pointer_sized_int_node, size_in_bytes);
1734 }
1735
1736 if (size_in_bytes > 1)
1737 {
1738 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1739 || size_in_bytes > 16)
1740 is_scalar_access = false;
1741 else if (align && align < size_in_bytes * BITS_PER_UNIT)
1742 {
1743 /* On non-strict-alignment targets, if a
1744 16-byte access is only 8-byte aligned,
1745 this results in a misaligned 2-byte
1746 shadow memory load, but the access can
1747 still be handled with a single read. */
1748 if (size_in_bytes != 16
1749 || STRICT_ALIGNMENT
1750 || align < 8 * BITS_PER_UNIT)
1751 is_scalar_access = false;
1752 }
1753 }
1754
1755 HOST_WIDE_INT flags = 0;
1756 if (is_store)
1757 flags |= ASAN_CHECK_STORE;
1758 if (is_non_zero_len)
1759 flags |= ASAN_CHECK_NON_ZERO_LEN;
1760 if (is_scalar_access)
1761 flags |= ASAN_CHECK_SCALAR_ACCESS;
1762
1763 g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
1764 build_int_cst (integer_type_node, flags),
1765 base, len,
1766 build_int_cst (integer_type_node,
1767 align / BITS_PER_UNIT));
1768 gimple_set_location (g, loc);
1769 if (before_p)
1770 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1771 else
1772 {
1773 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1774 gsi_next (&gsi);
1775 *iter = gsi;
1776 }
1777 }
1778
1779 /* If T represents a memory access, add instrumentation code before ITER.
1780 LOCATION is the source code location.
1781 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
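
/* E.g. (illustrative) for the C statement "s.f = 1;" the LHS
   COMPONENT_REF is passed here with IS_STORE true; unless one of
   the filters below rejects it, a scalar check of sizeof (s.f)
   bytes is queued via build_check_stmt. */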
1782
1783 static void
1784 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1785 location_t location, bool is_store)
1786 {
1787 if (is_store && !ASAN_INSTRUMENT_WRITES)
1788 return;
1789 if (!is_store && !ASAN_INSTRUMENT_READS)
1790 return;
1791
1792 tree type, base;
1793 HOST_WIDE_INT size_in_bytes;
1794
1795 type = TREE_TYPE (t);
1796 switch (TREE_CODE (t))
1797 {
1798 case ARRAY_REF:
1799 case COMPONENT_REF:
1800 case INDIRECT_REF:
1801 case MEM_REF:
1802 case VAR_DECL:
1803 case BIT_FIELD_REF:
1804 break;
1806 default:
1807 return;
1808 }
1809
1810 size_in_bytes = int_size_in_bytes (type);
1811 if (size_in_bytes <= 0)
1812 return;
1813
1814 HOST_WIDE_INT bitsize, bitpos;
1815 tree offset;
1816 machine_mode mode;
1817 int volatilep = 0, unsignedp = 0;
1818 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
1819 &mode, &unsignedp, &volatilep, false);
1820
1821 if (TREE_CODE (t) == COMPONENT_REF
1822 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1823 {
1824 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1825 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1826 TREE_OPERAND (t, 0), repr,
1827 NULL_TREE), location, is_store);
1828 return;
1829 }
1830
1831 if (bitpos % BITS_PER_UNIT
1832 || bitsize != size_in_bytes * BITS_PER_UNIT)
1833 return;
1834
1835 if (TREE_CODE (inner) == VAR_DECL
1836 && offset == NULL_TREE
1837 && bitpos >= 0
1838 && DECL_SIZE (inner)
1839 && tree_fits_shwi_p (DECL_SIZE (inner))
1840 && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
1841 {
1842 if (DECL_THREAD_LOCAL_P (inner))
1843 return;
1844 if (!ASAN_GLOBALS && is_global_var (inner))
1845 return;
1846 if (!TREE_STATIC (inner))
1847 {
1848 /* Automatic vars in the current function will always be
1849 accessible. */
1850 if (decl_function_context (inner) == current_function_decl)
1851 return;
1852 }
1853 /* Always instrument external vars, as they might be
1854 dynamically initialized. */
1855 else if (!DECL_EXTERNAL (inner))
1856 {
1857 /* Static vars that are known not to be dynamically
1858 initialized will always be accessible. */
1859 varpool_node *vnode = varpool_node::get (inner);
1860 if (vnode && !vnode->dynamically_initialized)
1861 return;
1862 }
1863 }
1864
1865 base = build_fold_addr_expr (t);
1866 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1867 {
1868 unsigned int align = get_object_alignment (t);
1869 build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
1870 /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
1871 is_store, /*is_scalar_access*/true, align);
1872 update_mem_ref_hash_table (base, size_in_bytes);
1873 update_mem_ref_hash_table (t, size_in_bytes);
1874 }
1875
1876 }
1877
1878 /* Insert a memory reference into the hash table if the access
1879 length can be determined at compile time. */
1880
1881 static void
1882 maybe_update_mem_ref_hash_table (tree base, tree len)
1883 {
1884 if (!POINTER_TYPE_P (TREE_TYPE (base))
1885 || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
1886 return;
1887
1888 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1889
1890 if (size_in_bytes != -1)
1891 update_mem_ref_hash_table (base, size_in_bytes);
1892 }
1893
1894 /* Instrument an access to a contiguous memory region that starts at
1895 the address pointed to by BASE, over a length of LEN (expressed
1896 in units of sizeof (*BASE)). ITER points to the instruction before
1897 which the instrumentation instructions must be inserted. LOCATION
1898 is the source location that the instrumentation instructions must
1899 have. If IS_STORE is true, then the memory access is a store;
1900 otherwise, it's a load. */
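
/* For instance (illustrative), for a non-intercepted built-in that
   stores LEN bytes starting at BASE, this emits a non-scalar

     .ASAN_CHECK (<flags>, base, len, 0);

   before the call; intercepted built-ins (e.g. memset) are instead
   checked by the run-time library itself. */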
1901
1902 static void
1903 instrument_mem_region_access (tree base, tree len,
1904 gimple_stmt_iterator *iter,
1905 location_t location, bool is_store)
1906 {
1907 if (!POINTER_TYPE_P (TREE_TYPE (base))
1908 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1909 || integer_zerop (len))
1910 return;
1911
1912 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1913
1914 if ((size_in_bytes == -1)
1915 || !has_mem_ref_been_instrumented (base, size_in_bytes))
1916 {
1917 build_check_stmt (location, base, len, size_in_bytes, iter,
1918 /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
1919 is_store, /*is_scalar_access*/false, /*align*/0);
1920 }
1921
1922 maybe_update_mem_ref_hash_table (base, len);
1923 *iter = gsi_for_stmt (gsi_stmt (*iter));
1924 }
1925
1926 /* Instrument the call to a built-in memory access function that is
1927 pointed to by the iterator ITER.
1928
1929 Upon completion, return TRUE iff *ITER has been advanced to the
1930 statement following the one it was originally pointing to. */
1931
1932 static bool
1933 instrument_builtin_call (gimple_stmt_iterator *iter)
1934 {
1935 if (!ASAN_MEMINTRIN)
1936 return false;
1937
1938 bool iter_advanced_p = false;
1939 gcall *call = as_a <gcall *> (gsi_stmt (*iter));
1940
1941 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
1942
1943 location_t loc = gimple_location (call);
1944
1945 asan_mem_ref src0, src1, dest;
1946 asan_mem_ref_init (&src0, NULL, 1);
1947 asan_mem_ref_init (&src1, NULL, 1);
1948 asan_mem_ref_init (&dest, NULL, 1);
1949
1950 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1951 bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
1952 dest_is_deref = false, intercepted_p = true;
1953
1954 if (get_mem_refs_of_builtin_call (call,
1955 &src0, &src0_len, &src0_is_store,
1956 &src1, &src1_len, &src1_is_store,
1957 &dest, &dest_len, &dest_is_store,
1958 &dest_is_deref, &intercepted_p))
1959 {
1960 if (dest_is_deref)
1961 {
1962 instrument_derefs (iter, dest.start, loc, dest_is_store);
1963 gsi_next (iter);
1964 iter_advanced_p = true;
1965 }
1966 else if (!intercepted_p
1967 && (src0_len || src1_len || dest_len))
1968 {
1969 if (src0.start != NULL_TREE)
1970 instrument_mem_region_access (src0.start, src0_len,
1971 iter, loc, /*is_store=*/false);
1972 if (src1.start != NULL_TREE)
1973 instrument_mem_region_access (src1.start, src1_len,
1974 iter, loc, /*is_store=*/false);
1975 if (dest.start != NULL_TREE)
1976 instrument_mem_region_access (dest.start, dest_len,
1977 iter, loc, /*is_store=*/true);
1978
1979 *iter = gsi_for_stmt (call);
1980 gsi_next (iter);
1981 iter_advanced_p = true;
1982 }
1983 else
1984 {
1985 if (src0.start != NULL_TREE)
1986 maybe_update_mem_ref_hash_table (src0.start, src0_len);
1987 if (src1.start != NULL_TREE)
1988 maybe_update_mem_ref_hash_table (src1.start, src1_len);
1989 if (dest.start != NULL_TREE)
1990 maybe_update_mem_ref_hash_table (dest.start, dest_len);
1991 }
1992 }
1993 return iter_advanced_p;
1994 }
1995
1996 /* Instrument the assignment statement ITER if it is subject to
1997 instrumentation. Return TRUE iff instrumentation actually
1998 happened. In that case, the iterator ITER is advanced to the next
1999 statement following the one initially pointed to by ITER,
2000 and the memory reference whose access has been instrumented
2001 is added to the memory references hash table. */
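
/* As an illustration, for an aggregate copy "d = s;" both branches
   below fire: the store to 'd' and the load from 's' are each
   passed to instrument_derefs. */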
2002
2003 static bool
2004 maybe_instrument_assignment (gimple_stmt_iterator *iter)
2005 {
2006 gimple s = gsi_stmt (*iter);
2007
2008 gcc_assert (gimple_assign_single_p (s));
2009
2010 tree ref_expr = NULL_TREE;
2011 bool is_store, is_instrumented = false;
2012
2013 if (gimple_store_p (s))
2014 {
2015 ref_expr = gimple_assign_lhs (s);
2016 is_store = true;
2017 instrument_derefs (iter, ref_expr,
2018 gimple_location (s),
2019 is_store);
2020 is_instrumented = true;
2021 }
2022
2023 if (gimple_assign_load_p (s))
2024 {
2025 ref_expr = gimple_assign_rhs1 (s);
2026 is_store = false;
2027 instrument_derefs (iter, ref_expr,
2028 gimple_location (s),
2029 is_store);
2030 is_instrumented = true;
2031 }
2032
2033 if (is_instrumented)
2034 gsi_next (iter);
2035
2036 return is_instrumented;
2037 }
2038
2039 /* Instrument the function call pointed to by the iterator ITER, if it
2040 is subject to instrumentation. At the moment, the only function
2041 calls that are instrumented are some built-in functions that access
2042 memory. Look at instrument_builtin_call to learn more.
2043
2044 Upon completion return TRUE iff *ITER was advanced to the statement
2045 following the one it was originally pointing to. */
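
/* Illustration of the noreturn handling below: before e.g. a call
   to exit () a call

     __asan_handle_no_return ();

   is inserted, so that the runtime can unpoison the whole stack,
   since the skipped frames will never run their own unpoisoning
   code (the exact semantics are defined by libsanitizer). */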
2046
2047 static bool
2048 maybe_instrument_call (gimple_stmt_iterator *iter)
2049 {
2050 gimple stmt = gsi_stmt (*iter);
2051 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2052
2053 if (is_builtin && instrument_builtin_call (iter))
2054 return true;
2055
2056 if (gimple_call_noreturn_p (stmt))
2057 {
2058 if (is_builtin)
2059 {
2060 tree callee = gimple_call_fndecl (stmt);
2061 switch (DECL_FUNCTION_CODE (callee))
2062 {
2063 case BUILT_IN_UNREACHABLE:
2064 case BUILT_IN_TRAP:
2065 /* Don't instrument these. */
2066 return false;
2067 default:
2068 break;
2069 }
2070 }
2071 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2072 gimple g = gimple_build_call (decl, 0);
2073 gimple_set_location (g, gimple_location (stmt));
2074 gsi_insert_before (iter, g, GSI_SAME_STMT);
2075 }
2076 return false;
2077 }
2078
2079 /* Walk each instruction of all basic blocks and instrument those that
2080 represent memory references: loads, stores, or function calls.
2081 In a given basic block, this function avoids instrumenting memory
2082 references that have already been instrumented. */
2083
2084 static void
2085 transform_statements (void)
2086 {
2087 basic_block bb, last_bb = NULL;
2088 gimple_stmt_iterator i;
2089 int saved_last_basic_block = last_basic_block_for_fn (cfun);
2090
2091 FOR_EACH_BB_FN (bb, cfun)
2092 {
2093 basic_block prev_bb = bb;
2094
2095 if (bb->index >= saved_last_basic_block)
continue;
2096
2097 /* Flush the mem ref hash table, if the current bb doesn't have
2098 exactly one predecessor, or if that predecessor (skipping
2099 over asan-created basic blocks) isn't the last processed
2100 basic block. Thus we effectively flush on extended basic
2101 block boundaries. */
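/* Worked example (illustrative): for a diamond CFG A -> {B, C} -> D
   processed in that order, B keeps the table filled by A (A is its
   single predecessor and the last processed block), while C and D
   flush it: C's single predecessor A is no longer the last
   processed block, and D has two predecessors. */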
2102 while (single_pred_p (prev_bb))
2103 {
2104 prev_bb = single_pred (prev_bb);
2105 if (prev_bb->index < saved_last_basic_block)
2106 break;
2107 }
2108 if (prev_bb != last_bb)
2109 empty_mem_ref_hash_table ();
2110 last_bb = bb;
2111
2112 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2113 {
2114 gimple s = gsi_stmt (i);
2115
2116 if (has_stmt_been_instrumented_p (s))
2117 gsi_next (&i);
2118 else if (gimple_assign_single_p (s)
2119 && !gimple_clobber_p (s)
2120 && maybe_instrument_assignment (&i))
2121 /* Nothing to do as maybe_instrument_assignment advanced
2122 the iterator I. */;
2123 else if (is_gimple_call (s) && maybe_instrument_call (&i))
2124 /* Nothing to do as maybe_instrument_call
2125 advanced the iterator I. */;
2126 else
2127 {
2128 /* No instrumentation happened.
2129
2130 If the current instruction is a function call that
2131 might free something, let's forget about the memory
2132 references that got instrumented. Otherwise we might
2133 miss some instrumentation opportunities. */
2134 if (is_gimple_call (s) && !nonfreeing_call_p (s))
2135 empty_mem_ref_hash_table ();
2136
2137 gsi_next (&i);
2138 }
2139 }
2140 }
2141 free_mem_ref_resources ();
2142 }
2143
2144 /* Build
2145 __asan_before_dynamic_init (module_name)
2146 or
2147 __asan_after_dynamic_init ()
2148 call. */
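
/* Conceptually (illustrative), for a TU "foo.c" the module
   constructor wraps the dynamic initializers like so:

     __asan_before_dynamic_init ("foo.c");
     ... run the TU's dynamic initializers ...
     __asan_after_dynamic_init ();

   the CALL_EXPR for either end is what this function builds. */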
2149
2150 tree
2151 asan_dynamic_init_call (bool after_p)
2152 {
2153 tree fn = builtin_decl_implicit (after_p
2154 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2155 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2156 tree module_name_cst = NULL_TREE;
2157 if (!after_p)
2158 {
2159 pretty_printer module_name_pp;
2160 pp_string (&module_name_pp, main_input_filename);
2161
2162 if (shadow_ptr_types[0] == NULL_TREE)
2163 asan_init_shadow_ptr_types ();
2164 module_name_cst = asan_pp_string (&module_name_pp);
2165 module_name_cst = fold_convert (const_ptr_type_node,
2166 module_name_cst);
2167 }
2168
2169 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2170 }
2171
2172 /* Build
2173 struct __asan_global
2174 {
2175 const void *__beg;
2176 uptr __size;
2177 uptr __size_with_redzone;
2178 const void *__name;
2179 const void *__module_name;
2180 uptr __has_dynamic_init;
2181 __asan_global_source_location *__location;
2182 } type. */
2183
2184 static tree
2185 asan_global_struct (void)
2186 {
2187 static const char *field_names[7]
2188 = { "__beg", "__size", "__size_with_redzone",
2189 "__name", "__module_name", "__has_dynamic_init", "__location"};
2190 tree fields[7], ret;
2191 int i;
2192
2193 ret = make_node (RECORD_TYPE);
2194 for (i = 0; i < 7; i++)
2195 {
2196 fields[i]
2197 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2198 get_identifier (field_names[i]),
2199 (i == 0 || i == 3) ? const_ptr_type_node
2200 : pointer_sized_int_node);
2201 DECL_CONTEXT (fields[i]) = ret;
2202 if (i)
2203 DECL_CHAIN (fields[i - 1]) = fields[i];
2204 }
2205 tree type_decl = build_decl (input_location, TYPE_DECL,
2206 get_identifier ("__asan_global"), ret);
2207 DECL_IGNORED_P (type_decl) = 1;
2208 DECL_ARTIFICIAL (type_decl) = 1;
2209 TYPE_FIELDS (ret) = fields[0];
2210 TYPE_NAME (ret) = type_decl;
2211 TYPE_STUB_DECL (ret) = type_decl;
2212 layout_type (ret);
2213 return ret;
2214 }
2215
2216 /* Append the description of a single global DECL to vector V.
2217 TYPE is the __asan_global struct type as returned by asan_global_struct. */
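
/* Hedged example: assuming ASAN_RED_ZONE_SIZE == 32, for "int g;"
   (size 4) defined in file.c the appended initializer is
   conceptually

     { &g, 4, 64, "g", "file.c", 0, &.LASANLOC<n> }

   i.e. 60 bytes of trailing redzone pad the global to 64 bytes;
   the exact values depend on target and flags. */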
2218
2219 static void
2220 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2221 {
2222 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2223 unsigned HOST_WIDE_INT size;
2224 tree str_cst, module_name_cst, refdecl = decl;
2225 vec<constructor_elt, va_gc> *vinner = NULL;
2226
2227 pretty_printer asan_pp, module_name_pp;
2228
2229 if (DECL_NAME (decl))
2230 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2231 else
2232 pp_string (&asan_pp, "<unknown>");
2233 str_cst = asan_pp_string (&asan_pp);
2234
2235 pp_string (&module_name_pp, main_input_filename);
2236 module_name_cst = asan_pp_string (&module_name_pp);
2237
2238 if (asan_needs_local_alias (decl))
2239 {
2240 char buf[20];
2241 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2242 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2243 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2244 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2245 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2246 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2247 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2248 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2249 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2250 TREE_STATIC (refdecl) = 1;
2251 TREE_PUBLIC (refdecl) = 0;
2252 TREE_USED (refdecl) = 1;
2253 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2254 }
2255
2256 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2257 fold_convert (const_ptr_type_node,
2258 build_fold_addr_expr (refdecl)));
2259 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2260 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2261 size += asan_red_zone_size (size);
2262 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2263 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2264 fold_convert (const_ptr_type_node, str_cst));
2265 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2266 fold_convert (const_ptr_type_node, module_name_cst));
2267 varpool_node *vnode = varpool_node::get (decl);
2268 int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2269 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2270 build_int_cst (uptr, has_dynamic_init));
2271 tree locptr = NULL_TREE;
2272 location_t loc = DECL_SOURCE_LOCATION (decl);
2273 expanded_location xloc = expand_location (loc);
2274 if (xloc.file != NULL)
2275 {
2276 static int lasanloccnt = 0;
2277 char buf[25];
2278 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2279 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2280 ubsan_get_source_location_type ());
2281 TREE_STATIC (var) = 1;
2282 TREE_PUBLIC (var) = 0;
2283 DECL_ARTIFICIAL (var) = 1;
2284 DECL_IGNORED_P (var) = 1;
2285 pretty_printer filename_pp;
2286 pp_string (&filename_pp, xloc.file);
2287 tree str = asan_pp_string (&filename_pp);
2288 tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2289 NULL_TREE, str, NULL_TREE,
2290 build_int_cst (unsigned_type_node,
2291 xloc.line), NULL_TREE,
2292 build_int_cst (unsigned_type_node,
2293 xloc.column));
2294 TREE_CONSTANT (ctor) = 1;
2295 TREE_STATIC (ctor) = 1;
2296 DECL_INITIAL (var) = ctor;
2297 varpool_node::finalize_decl (var);
2298 locptr = fold_convert (uptr, build_fold_addr_expr (var));
2299 }
2300 else
2301 locptr = build_int_cst (uptr, 0);
2302 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2303 init = build_constructor (type, vinner);
2304 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2305 }
2306
2307 /* Initialize sanitizer.def builtins if the front end hasn't initialized them already. */
2308 void
2309 initialize_sanitizer_builtins (void)
2310 {
2311 tree decl;
2312
2313 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2314 return;
2315
2316 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2317 tree BT_FN_VOID_PTR
2318 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2319 tree BT_FN_VOID_CONST_PTR
2320 = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2321 tree BT_FN_VOID_PTR_PTR
2322 = build_function_type_list (void_type_node, ptr_type_node,
2323 ptr_type_node, NULL_TREE);
2324 tree BT_FN_VOID_PTR_PTR_PTR
2325 = build_function_type_list (void_type_node, ptr_type_node,
2326 ptr_type_node, ptr_type_node, NULL_TREE);
2327 tree BT_FN_VOID_PTR_PTRMODE
2328 = build_function_type_list (void_type_node, ptr_type_node,
2329 pointer_sized_int_node, NULL_TREE);
2330 tree BT_FN_VOID_INT
2331 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2332 tree BT_FN_SIZE_CONST_PTR_INT
2333 = build_function_type_list (size_type_node, const_ptr_type_node,
2334 integer_type_node, NULL_TREE);
2335 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2336 tree BT_FN_IX_CONST_VPTR_INT[5];
2337 tree BT_FN_IX_VPTR_IX_INT[5];
2338 tree BT_FN_VOID_VPTR_IX_INT[5];
2339 tree vptr
2340 = build_pointer_type (build_qualified_type (void_type_node,
2341 TYPE_QUAL_VOLATILE));
2342 tree cvptr
2343 = build_pointer_type (build_qualified_type (void_type_node,
2344 TYPE_QUAL_VOLATILE
2345 |TYPE_QUAL_CONST));
2346 tree boolt
2347 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2348 int i;
2349 for (i = 0; i < 5; i++)
2350 {
2351 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2352 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2353 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2354 integer_type_node, integer_type_node,
2355 NULL_TREE);
2356 BT_FN_IX_CONST_VPTR_INT[i]
2357 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2358 BT_FN_IX_VPTR_IX_INT[i]
2359 = build_function_type_list (ix, vptr, ix, integer_type_node,
2360 NULL_TREE);
2361 BT_FN_VOID_VPTR_IX_INT[i]
2362 = build_function_type_list (void_type_node, vptr, ix,
2363 integer_type_node, NULL_TREE);
2364 }
2365 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2366 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2367 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2368 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2369 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2370 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2371 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2372 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2373 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2374 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2375 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2376 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2377 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2378 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2379 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2380 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2381 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2382 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2383 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2384 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2385 #undef ATTR_NOTHROW_LEAF_LIST
2386 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2387 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2388 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2389 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2390 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2391 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2392 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2393 ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
2394 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2395 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2396 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2397 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2398 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2399 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2400 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2401 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2402 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2403 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2404 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2405 /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2406 #undef ATTR_PURE_NOTHROW_LEAF_LIST
2407 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
2408 #undef DEF_SANITIZER_BUILTIN
2409 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2410 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2411 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2412 set_call_expr_flags (decl, ATTRS); \
2413 set_builtin_decl (ENUM, decl, true);
2414
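/* As an illustration, a sanitizer.def entry such as

     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_REPORT_LOAD1,
			    "__asan_report_load1", BT_FN_VOID_PTR, ...)

   expands to an add_builtin_function ("__builtin___asan_report_load1",
   ...) call plus the flag and decl registration above (see
   sanitizer.def for the actual entries and their attributes). */
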
2415 #include "sanitizer.def"
2416
2417 /* -fsanitize=object-size uses __builtin_object_size, but that might
2418 not be available for e.g. Fortran at this point. We use
2419 DEF_SANITIZER_BUILTIN here only as a convenience macro. */
2420 if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
2421 && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
2422 DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
2423 BT_FN_SIZE_CONST_PTR_INT,
2424 ATTR_PURE_NOTHROW_LEAF_LIST)
2425
2426 #undef DEF_SANITIZER_BUILTIN
2427 }
2428
2429 /* Called via hash_table::traverse. Count the number of emitted
2430 STRING_CSTs in the constant hash table. */
2431
2432 int
2433 count_string_csts (constant_descriptor_tree **slot,
2434 unsigned HOST_WIDE_INT *data)
2435 {
2436 struct constant_descriptor_tree *desc = *slot;
2437 if (TREE_CODE (desc->value) == STRING_CST
2438 && TREE_ASM_WRITTEN (desc->value)
2439 && asan_protect_global (desc->value))
2440 ++*data;
2441 return 1;
2442 }
2443
2444 /* Helper structure to pass two parameters to
2445 add_string_csts. */
2446
2447 struct asan_add_string_csts_data
2448 {
2449 tree type;
2450 vec<constructor_elt, va_gc> *v;
2451 };
2452
2453 /* Called via hash_table::traverse. Call asan_add_global
2454 on emitted STRING_CSTs from the constant hash table. */
2455
2456 int
2457 add_string_csts (constant_descriptor_tree **slot,
2458 asan_add_string_csts_data *aascd)
2459 {
2460 struct constant_descriptor_tree *desc = *slot;
2461 if (TREE_CODE (desc->value) == STRING_CST
2462 && TREE_ASM_WRITTEN (desc->value)
2463 && asan_protect_global (desc->value))
2464 {
2465 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2466 aascd->type, aascd->v);
2467 }
2468 return 1;
2469 }
2470
2471 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2472 invoke ggc_collect. */
2473 static GTY(()) tree asan_ctor_statements;
2474
2475 /* Module-level instrumentation.
2476 - Insert __asan_init_vN() into the list of CTORs.
2477 - TODO: insert redzones around globals.
2478 */
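
/* Sketch of the emitted machinery (names illustrative):

     // constructor, priority MAX_RESERVED_INIT_PRIORITY - 1 for
     // user-space ASan:
     __asan_init_vN ();
     __asan_register_globals (&.LASAN0[0], <gcount>);

     // matching destructor:
     __asan_unregister_globals (&.LASAN0[0], <gcount>);

   both are wrapped into static cdtor functions via
   cgraph_build_static_cdtor below. */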
2479
2480 void
2481 asan_finish_file (void)
2482 {
2483 varpool_node *vnode;
2484 unsigned HOST_WIDE_INT gcount = 0;
2485
2486 if (shadow_ptr_types[0] == NULL_TREE)
2487 asan_init_shadow_ptr_types ();
2488 /* Avoid instrumenting code in the asan ctors/dtors.
2489 We don't need to insert padding after the description strings,
2490 nor after the .LASAN* array. */
2491 flag_sanitize &= ~SANITIZE_ADDRESS;
2492
2493 /* For user-space we want asan constructors to run first.
2494 The Linux kernel does not support priorities other than the default,
2495 and the only other user of constructors is coverage. So we run with
2496 the default priority. */
2497 int priority = flag_sanitize & SANITIZE_USER_ADDRESS
2498 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
2499
2500 if (flag_sanitize & SANITIZE_USER_ADDRESS)
2501 {
2502 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2503 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2504 }
2505 FOR_EACH_DEFINED_VARIABLE (vnode)
2506 if (TREE_ASM_WRITTEN (vnode->decl)
2507 && asan_protect_global (vnode->decl))
2508 ++gcount;
2509 hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
2510 const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
2511 (&gcount);
2512 if (gcount)
2513 {
2514 tree type = asan_global_struct (), var, ctor;
2515 tree dtor_statements = NULL_TREE;
2516 vec<constructor_elt, va_gc> *v;
2517 char buf[20];
2518
2519 type = build_array_type_nelts (type, gcount);
2520 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2521 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2522 type);
2523 TREE_STATIC (var) = 1;
2524 TREE_PUBLIC (var) = 0;
2525 DECL_ARTIFICIAL (var) = 1;
2526 DECL_IGNORED_P (var) = 1;
2527 vec_alloc (v, gcount);
2528 FOR_EACH_DEFINED_VARIABLE (vnode)
2529 if (TREE_ASM_WRITTEN (vnode->decl)
2530 && asan_protect_global (vnode->decl))
2531 asan_add_global (vnode->decl, TREE_TYPE (type), v);
2532 struct asan_add_string_csts_data aascd;
2533 aascd.type = TREE_TYPE (type);
2534 aascd.v = v;
2535 const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
2536 (&aascd);
2537 ctor = build_constructor (type, v);
2538 TREE_CONSTANT (ctor) = 1;
2539 TREE_STATIC (ctor) = 1;
2540 DECL_INITIAL (var) = ctor;
2541 varpool_node::finalize_decl (var);
2542
2543 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2544 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
2545 append_to_statement_list (build_call_expr (fn, 2,
2546 build_fold_addr_expr (var),
2547 gcount_tree),
2548 &asan_ctor_statements);
2549
2550 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2551 append_to_statement_list (build_call_expr (fn, 2,
2552 build_fold_addr_expr (var),
2553 gcount_tree),
2554 &dtor_statements);
2555 cgraph_build_static_cdtor ('D', dtor_statements, priority);
2556 }
2557 if (asan_ctor_statements)
2558 cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
2559 flag_sanitize |= SANITIZE_ADDRESS;
2560 }
2561
2562 /* Expand the IFN_ASAN_CHECK internal function. */
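
/* Expansion sketch (illustrative pseudo-GIMPLE) for an 8-byte,
   8-byte-aligned load when not using callbacks:

     base_addr = (uintptr) base;
     shadow = *(char *) ((base_addr >> ASAN_SHADOW_SHIFT)
			 + asan_shadow_offset ());
     if (shadow != 0)
       __asan_report_load8 (base_addr);

   accesses of 1, 2 or 4 bytes additionally compare
   (base_addr & 7) + size - 1 against the shadow value, and
   non-constant lengths get both their first and last byte checked,
   as generated below. */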
2563
2564 bool
2565 asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
2566 {
2567 gimple g = gsi_stmt (*iter);
2568 location_t loc = gimple_location (g);
2569
2570 bool recover_p
2571 = (flag_sanitize & flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
2572
2573 HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
2574 gcc_assert (flags < ASAN_CHECK_LAST);
2575 bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
2576 bool is_store = (flags & ASAN_CHECK_STORE) != 0;
2577 bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
2578
2579 tree base = gimple_call_arg (g, 1);
2580 tree len = gimple_call_arg (g, 2);
2581 HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
2582
2583 HOST_WIDE_INT size_in_bytes
2584 = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2585
2586 if (use_calls)
2587 {
2588 /* Instrument using callbacks. */
2589 gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2590 NOP_EXPR, base);
2591 gimple_set_location (g, loc);
2592 gsi_insert_before (iter, g, GSI_SAME_STMT);
2593 tree base_addr = gimple_assign_lhs (g);
2594
2595 int nargs;
2596 tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
2597 if (nargs == 1)
2598 g = gimple_build_call (fun, 1, base_addr);
2599 else
2600 {
2601 gcc_assert (nargs == 2);
2602 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2603 NOP_EXPR, len);
2604 gimple_set_location (g, loc);
2605 gsi_insert_before (iter, g, GSI_SAME_STMT);
2606 tree sz_arg = gimple_assign_lhs (g);
2607 g = gimple_build_call (fun, nargs, base_addr, sz_arg);
2608 }
2609 gimple_set_location (g, loc);
2610 gsi_replace (iter, g, false);
2611 return false;
2612 }
2613
2614 HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
2615
2616 tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
2617 tree shadow_type = TREE_TYPE (shadow_ptr_type);
2618
2619 gimple_stmt_iterator gsi = *iter;
2620
2621 if (!is_non_zero_len)
2622 {
2623 /* The length of the memory area to asan-protect is not known
2624 to be non-zero at compile time. Guard the generated
2625 instrumentation code like:
2626
2627 if (len != 0)
2628 {
2629 // asan instrumentation code goes here.
2630 }
2631 // fallthrough instructions, starting with *ITER. */
2632
2633 g = gimple_build_cond (NE_EXPR,
2634 len,
2635 build_int_cst (TREE_TYPE (len), 0),
2636 NULL_TREE, NULL_TREE);
2637 gimple_set_location (g, loc);
2638
2639 basic_block then_bb, fallthrough_bb;
2640 insert_if_then_before_iter (as_a <gcond *> (g), iter,
2641 /*then_more_likely_p=*/true,
2642 &then_bb, &fallthrough_bb);
2643 /* Note that fallthrough_bb starts with the statement that was
2644 pointed to by ITER. */
2645
2646 /* The 'then block' of the 'if (len != 0)' condition is where
2647 we'll generate the asan instrumentation code now. */
2648 gsi = gsi_last_bb (then_bb);
2649 }
2650
2651 /* Get an iterator on the point where we can add the condition
2652 statement for the instrumentation. */
2653 basic_block then_bb, else_bb;
2654 gsi = create_cond_insert_point (&gsi, /*before_p*/false,
2655 /*then_more_likely_p=*/false,
2656 /*create_then_fallthru_edge*/recover_p,
2657 &then_bb,
2658 &else_bb);
2659
2660 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2661 NOP_EXPR, base);
2662 gimple_set_location (g, loc);
2663 gsi_insert_before (&gsi, g, GSI_NEW_STMT);
2664 tree base_addr = gimple_assign_lhs (g);
2665
2666 tree t = NULL_TREE;
2667 if (real_size_in_bytes >= 8)
2668 {
2669 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2670 shadow_ptr_type);
2671 t = shadow;
2672 }
2673 else
2674 {
2675 /* Slow path for 1, 2 and 4 byte accesses. */
2676 /* Test (shadow != 0)
2677 & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow). */
2678 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2679 shadow_ptr_type);
2680 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2681 gimple_seq seq = NULL;
2682 gimple_seq_add_stmt (&seq, shadow_test);
2683 /* Accesses aligned to >= 8 bytes can test just
2684 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
2685 to be 0. */
2686 if (align < 8)
2687 {
2688 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2689 base_addr, 7));
2690 gimple_seq_add_stmt (&seq,
2691 build_type_cast (shadow_type,
2692 gimple_seq_last (seq)));
2693 if (real_size_in_bytes > 1)
2694 gimple_seq_add_stmt (&seq,
2695 build_assign (PLUS_EXPR,
2696 gimple_seq_last (seq),
2697 real_size_in_bytes - 1));
2698 t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
2699 }
2700 else
2701 t = build_int_cst (shadow_type, real_size_in_bytes - 1);
2702 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
2703 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2704 gimple_seq_last (seq)));
2705 t = gimple_assign_lhs (gimple_seq_last (seq));
2706 gimple_seq_set_location (seq, loc);
2707 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2708
2709 /* For non-constant, misaligned or otherwise weird access sizes,
2710 check the first and last bytes. */
2711 if (size_in_bytes == -1)
2712 {
2713 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2714 MINUS_EXPR, len,
2715 build_int_cst (pointer_sized_int_node, 1));
2716 gimple_set_location (g, loc);
2717 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2718 tree last = gimple_assign_lhs (g);
2719 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2720 PLUS_EXPR, base_addr, last);
2721 gimple_set_location (g, loc);
2722 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2723 tree base_end_addr = gimple_assign_lhs (g);
2724
2725 tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
2726 shadow_ptr_type);
2727 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2728 gimple_seq seq = NULL;
2729 gimple_seq_add_stmt (&seq, shadow_test);
2730 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2731 base_end_addr, 7));
2732 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
2733 gimple_seq_last (seq)));
2734 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
2735 gimple_seq_last (seq),
2736 shadow));
2737 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2738 gimple_seq_last (seq)));
2739 gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
2740 gimple_seq_last (seq)));
2741 t = gimple_assign_lhs (gimple_seq_last (seq));
2742 gimple_seq_set_location (seq, loc);
2743 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2744 }
2745 }
2746
2747 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
2748 NULL_TREE, NULL_TREE);
2749 gimple_set_location (g, loc);
2750 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2751
2752 /* Generate a call to the run-time library (e.g. __asan_report_load8). */
2753 gsi = gsi_start_bb (then_bb);
2754 int nargs;
2755 tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
2756 g = gimple_build_call (fun, nargs, base_addr, len);
2757 gimple_set_location (g, loc);
2758 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2759
2760 gsi_remove (iter, true);
2761 *iter = gsi_start_bb (else_bb);
2762
2763 return true;
2764 }
2765
2766 /* Instrument the current function. */
2767
2768 static unsigned int
2769 asan_instrument (void)
2770 {
2771 if (shadow_ptr_types[0] == NULL_TREE)
2772 asan_init_shadow_ptr_types ();
2773 transform_statements ();
2774 return 0;
2775 }
2776
2777 static bool
2778 gate_asan (void)
2779 {
2780 return (flag_sanitize & SANITIZE_ADDRESS) != 0
2781 && !lookup_attribute ("no_sanitize_address",
2782 DECL_ATTRIBUTES (current_function_decl));
2783 }
2784
2785 namespace {
2786
2787 const pass_data pass_data_asan =
2788 {
2789 GIMPLE_PASS, /* type */
2790 "asan", /* name */
2791 OPTGROUP_NONE, /* optinfo_flags */
2792 TV_NONE, /* tv_id */
2793 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2794 0, /* properties_provided */
2795 0, /* properties_destroyed */
2796 0, /* todo_flags_start */
2797 TODO_update_ssa, /* todo_flags_finish */
2798 };
2799
2800 class pass_asan : public gimple_opt_pass
2801 {
2802 public:
2803 pass_asan (gcc::context *ctxt)
2804 : gimple_opt_pass (pass_data_asan, ctxt)
2805 {}
2806
2807 /* opt_pass methods: */
2808 opt_pass * clone () { return new pass_asan (m_ctxt); }
2809 virtual bool gate (function *) { return gate_asan (); }
2810 virtual unsigned int execute (function *) { return asan_instrument (); }
2811
2812 }; // class pass_asan
2813
2814 } // anon namespace
2815
2816 gimple_opt_pass *
2817 make_pass_asan (gcc::context *ctxt)
2818 {
2819 return new pass_asan (ctxt);
2820 }
2821
2822 namespace {
2823
2824 const pass_data pass_data_asan_O0 =
2825 {
2826 GIMPLE_PASS, /* type */
2827 "asan0", /* name */
2828 OPTGROUP_NONE, /* optinfo_flags */
2829 TV_NONE, /* tv_id */
2830 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2831 0, /* properties_provided */
2832 0, /* properties_destroyed */
2833 0, /* todo_flags_start */
2834 TODO_update_ssa, /* todo_flags_finish */
2835 };
2836
2837 class pass_asan_O0 : public gimple_opt_pass
2838 {
2839 public:
2840 pass_asan_O0 (gcc::context *ctxt)
2841 : gimple_opt_pass (pass_data_asan_O0, ctxt)
2842 {}
2843
2844 /* opt_pass methods: */
2845 virtual bool gate (function *) { return !optimize && gate_asan (); }
2846 virtual unsigned int execute (function *) { return asan_instrument (); }
2847
2848 }; // class pass_asan_O0
2849
2850 } // anon namespace
2851
2852 gimple_opt_pass *
2853 make_pass_asan_O0 (gcc::context *ctxt)
2854 {
2855 return new pass_asan_O0 (ctxt);
2856 }
2857
2858 #include "gt-asan.h"