/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2015 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "hash-table.h"
#include "predict.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "calls.h"
#include "varasm.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "asan.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "output.h"
#include "tm_p.h"
#include "langhooks.h"
#include "alloc-pool.h"
#include "cfgloop.h"
#include "gimple-builder.h"
#include "ubsan.h"
#include "params.h"
#include "builtins.h"
#include "fnmatch.h"

/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   an instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory
   instruction.  For an 8- or 16-byte load accessing address X:
     ShadowAddr = (X >> 3) + Offset
     ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
     if (ShadowValue)
       __asan_report_load8(X);
   For a load of N bytes (N=1, 2 or 4) from address X:
     ShadowAddr = (X >> 3) + Offset
     ShadowValue = *(char*)ShadowAddr;
     if (ShadowValue)
       if ((X & 7) + N - 1 > ShadowValue)
         __asan_report_loadN(X);
   Stores are instrumented similarly, but using the __asan_report_storeN
   functions.  A call to __asan_init_vN() is inserted into the list of
   module CTORs.  N is the version number of the AddressSanitizer API.
   The changes between the API versions are listed in
   libsanitizer/asan/asan_interface_internal.h.
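
   As a concrete illustration, take Offset = 0x7fff8000 (the typical
   x86_64 shadow offset; this value is an assumption here, each target
   supplies its own via targetm.asan_shadow_offset).  An 8-byte load
   from X = 0x7fffffff0040 is then checked as:
     ShadowAddr = (0x7fffffff0040 >> 3) + 0x7fff8000 = 0x10007fff6008
     if (*(char*)0x10007fff6008)
       __asan_report_load8(0x7fffffff0040);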

   The run-time library redefines malloc (so that red zones are
   inserted around the allocated memory) and free (so that reuse of
   freed memory is delayed), and provides the __asan_report* and
   __asan_init_vN functions.

   Read more:
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm

   The current implementation supports detection of out-of-bounds and
   use-after-free in the heap, on the stack and for global variables.

   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:

     int
     foo ()
     {
       char a[23] = {0};
       int b[2] = {0};

       a[5] = 1;
       b[1] = 2;

       return a[5] + b[1];
     }

   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

   Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

   Slot 2/ [8 bytes of red zone, padding the space of 'a' so that the
            next slot is 32-byte aligned; this one is called a Partial
            Redzone; this 32-byte alignment is an asan constraint]

   Slot 3/ [24 bytes for variable 'a']

   Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

   Slot 6/ [8 bytes for variable 'b']

   Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
            'LEFT RedZone']

   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as such:

   1/ The first 8 bytes contain a magic asan number that is always
      0x41B58AB3.

   2/ The following 8 bytes contain a pointer to a string (to be parsed
      at run time by the asan run-time library), whose format is the
      following (an example follows this list):

        "<function-name> <space> <num-of-variables-on-the-stack>
        (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
        <length-of-var-in-bytes> ){n} "

      where '(...){n}' means the content inside the parentheses occurs
      'n' times, with 'n' being the number of variables on the stack.

   3/ The following 8 bytes contain the PC of the current function,
      which will be used by the run-time library to print an error
      message.

   4/ The following 8 bytes are reserved for internal use by the
      run-time.
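
   As an illustration, following the format just described, the
   descriptor string for the example function foo above would read
   along the lines of:

     "foo 2 32 8 96 24 "

   i.e. two variables on the stack: 'b' at the 32-byte aligned offset
   32 spanning 8 bytes, and 'a' at offset 96 spanning 24 bytes.  (The
   offsets here are illustrative; the actual layout is decided in
   expand_used_vars.)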

   The shadow memory for that stack layout is going to look like this:

     - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of shadow memory 8 bytes for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32-byte aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') are addressable.

     - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of shadow memory 8 bytes for slots 3 and 2:
       0xF4000000.  This is the concatenation of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that the 24 bytes of variable 'a' are addressable.

     - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.

   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark
   as non-accessible) the regions of the red zones and mark the
   regions of stack variables as accessible, and emits some epilogue
   code to un-poison (mark as accessible) the regions of red zones
   right before the function exits.

   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to
   populate the relevant shadow memory regions at load time.

   So the global variables are laid out so as to insert a red zone
   between them.  The red zones are sized so that each variable starts
   on a 32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:

     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       // size is 32-byte aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // Name of the module where the global variable is declared.
       const void *__module_name;

       // 1 if it has dynamic initialization, 0 otherwise.
       uptr __has_dynamic_init;

       // A pointer to a struct that contains the source location;
       // may be NULL.
       __asan_global_source_location *__location;
     }

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */
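
/* As a sketch of that global protection scheme (the identifiers and
   literal values below are illustrative assumptions, not what the
   compiler emits verbatim), a file containing 'int g;' behaves as if
   it also contained:

     static struct __asan_global g_desc
       = { &g, sizeof (g), 32, "g", "file.c", 0, NULL };

     __attribute__ ((constructor)) static void
     register_g (void)
     { __asan_register_globals (&g_desc, 1); }

     __attribute__ ((destructor)) static void
     unregister_g (void)
     { __asan_unregister_globals (&g_desc, 1); }  */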

static unsigned HOST_WIDE_INT asan_shadow_offset_value;
static bool asan_shadow_offset_computed;
static vec<char *> sanitized_sections;

/* Set the shadow offset from the value in string VAL.  Return true on
   success, false if VAL could not be parsed.  */

bool
set_asan_shadow_offset (const char *val)
{
  char *endp;

  errno = 0;
#ifdef HAVE_LONG_LONG
  asan_shadow_offset_value = strtoull (val, &endp, 0);
#else
  asan_shadow_offset_value = strtoul (val, &endp, 0);
#endif
  if (!(*val != '\0' && *endp == '\0' && errno == 0))
    return false;

  asan_shadow_offset_computed = true;

  return true;
}
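
/* For instance, the option -fasan-shadow-offset=0x7fff8000 arrives
   here as the string "0x7fff8000"; since strtoull is called with
   base 0, decimal, octal and hexadecimal spellings are all accepted:

     set_asan_shadow_offset ("0x7fff8000");   -> true
     set_asan_shadow_offset ("12nonsense");   -> false  */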

/* Set list of user-defined sections that need to be sanitized.  */

void
set_sanitized_sections (const char *sections)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    free (pat);
  sanitized_sections.truncate (0);

  for (const char *s = sections; *s; )
    {
      const char *end;
      for (end = s; *end && *end != ','; ++end);
      size_t len = end - s;
      sanitized_sections.safe_push (xstrndup (s, len));
      s = *end ? end + 1 : end;
    }
}
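
/* For example, the comma-separated wildcard list from the
   -fsanitize-sections= option is split into individual fnmatch
   patterns (the section names here are made up):

     set_sanitized_sections (".mysec,.data.rel*");
     afterwards sanitized_sections holds { ".mysec", ".data.rel*" }  */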

/* Checks whether section SEC should be sanitized.  */

static bool
section_sanitized_p (const char *sec)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    if (fnmatch (pat, sec, FNM_PERIOD) == 0)
      return true;
  return false;
}

/* Returns the Asan shadow offset.  */

static unsigned HOST_WIDE_INT
asan_shadow_offset ()
{
  if (!asan_shadow_offset_computed)
    {
      asan_shadow_offset_computed = true;
      asan_shadow_offset_value = targetm.asan_shadow_offset ();
    }
  return asan_shadow_offset_value;
}

alias_set_type asan_shadow_set = -1;

/* Pointer types to 1-byte resp. 2-byte integers in shadow memory.  A
   separate alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[2];

/* Decl for __asan_option_detect_stack_use_after_return.  */
static GTY(()) tree asan_detect_stack_use_after_return;

/* Various flags for Asan builtins.  */
enum asan_check_flags
{
  ASAN_CHECK_STORE = 1 << 0,
  ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
  ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
  ASAN_CHECK_LAST = 1 << 3
};

/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access.  */
  HOST_WIDE_INT access_size;
};

static alloc_pool asan_mem_ref_alloc_pool;

/* This creates the alloc pool used to store the instances of
   asan_mem_ref that are stored in the hash table asan_mem_ref_ht.  */

static alloc_pool
asan_mem_ref_get_alloc_pool ()
{
  if (asan_mem_ref_alloc_pool == NULL)
    asan_mem_ref_alloc_pool = create_alloc_pool ("asan_mem_ref",
                                                 sizeof (asan_mem_ref),
                                                 10);
  return asan_mem_ref_alloc_pool;
}

/* Initializes an instance of asan_mem_ref.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}

/* Allocates memory for an instance of asan_mem_ref in the memory
   pool returned by asan_mem_ref_get_alloc_pool and initializes it.
   START is the address of (or the expression pointing to) the
   beginning of the memory reference.  ACCESS_SIZE is the size of the
   access to the referenced memory.  */

static asan_mem_ref*
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
{
  asan_mem_ref *ref
    = (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}

/* This builds and returns a pointer to the end of the memory region
   that starts at START and has length LEN.  */

tree
asan_mem_ref_get_end (tree start, tree len)
{
  if (len == NULL_TREE || integer_zerop (len))
    return start;

  if (!ptrofftype_p (len))
    len = convert_to_ptrofftype (len);

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
}

/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}

struct asan_mem_ref_hasher
  : typed_noop_remove <asan_mem_ref>
{
  typedef asan_mem_ref *value_type;
  typedef asan_mem_ref *compare_type;

  static inline hashval_t hash (const asan_mem_ref *);
  static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
};

/* Hash a memory reference.  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  return iterative_hash_expr (mem_ref->start, 0);
}

/* Compare two memory references.  We accept the length of either
   memory reference to be NULL_TREE.  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
                            const asan_mem_ref *m2)
{
  return operand_equal_p (m1->start, m2->start, 0);
}

static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;

/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table<asan_mem_ref_hasher> *
get_mem_ref_hash_table ()
{
  if (!asan_mem_ref_ht)
    asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);

  return asan_mem_ref_ht;
}

/* Clear all entries from the memory references hash table.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht)
    asan_mem_ref_ht->empty ();
}

/* Free the memory references hash table.  */

static void
free_mem_ref_resources ()
{
  delete asan_mem_ref_ht;
  asan_mem_ref_ht = NULL;

  if (asan_mem_ref_alloc_pool)
    {
      free_alloc_pool (asan_mem_ref_alloc_pool);
      asan_mem_ref_alloc_pool = NULL;
    }
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
{
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
  return saved_ref && saved_ref->access_size >= access_size;
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}

/* Return true iff the access to the memory region starting at REF and
   of length LEN has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
{
  HOST_WIDE_INT size_in_bytes
    = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  return size_in_bytes != -1
         && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
}

/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  */

static bool
get_mem_ref_of_assignment (const gassign *assignment,
                           asan_mem_ref *ref,
                           bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}

/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.  */

static bool
get_mem_refs_of_builtin_call (const gcall *call,
                              asan_mem_ref *src0,
                              tree *src0_len,
                              bool *src0_is_store,
                              asan_mem_ref *src1,
                              tree *src1_len,
                              bool *src1_is_store,
                              asan_mem_ref *dst,
                              tree *dst_len,
                              bool *dst_is_store,
                              bool *dest_is_deref,
                              bool *intercepted_p)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  HOST_WIDE_INT access_size = 1;

  *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));

  switch (DECL_FUNCTION_CODE (callee))
    {
    /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

    /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

    /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

    /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

    /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;

    /* And now the __atomic* and __sync builtins.
       These are handled differently from the classical memory access
       builtins above.  */

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* fall through.  */

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      {
        dest = gimple_call_arg (call, 0);
        /* DEST represents the address of a memory location.
           instrument_derefs wants the memory location, so let's
           dereference the address DEST before handing it to
           instrument_derefs.  */
        if (TREE_CODE (dest) == ADDR_EXPR)
          dest = TREE_OPERAND (dest, 0);
        else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
          dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
                         dest, build_int_cst (TREE_TYPE (dest), 0));
        else
          gcc_unreachable ();

        access_size = int_size_in_bytes (TREE_TYPE (dest));
      }

    default:
      /* The other memory access builtins are not instrumented in this
         function because they either don't have any length parameter,
         or their length parameter is just a limit.  */
      break;
    }

  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
        {
          src0->start = source0;
          src0->access_size = access_size;
          *src0_len = len;
          *src0_is_store = false;
        }

      if (source1 != NULL_TREE)
        {
          src1->start = source1;
          src1->access_size = access_size;
          *src1_len = len;
          *src1_is_store = false;
        }

      if (dest != NULL_TREE)
        {
          dst->start = dest;
          dst->access_size = access_size;
          *dst_len = len;
          *dst_is_store = true;
        }

      got_reference_p = true;
    }
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}
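
/* For instance, for a call memcpy (d, s, n) this function fills DST
   with {d, 1} and SRC0 with {s, 1}, sets *DST_LEN and *SRC0_LEN to n,
   *DST_IS_STORE to true and *SRC0_IS_STORE to false, and returns
   true.  */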

/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
                                     &r_is_store))
        return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
        dest_is_store = false, dest_is_deref = false, intercepted_p = true;
      if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
                                        &src0, &src0_len, &src0_is_store,
                                        &src1, &src1_len, &src1_is_store,
                                        &dest, &dest_len, &dest_is_store,
                                        &dest_is_deref, &intercepted_p))
        {
          if (src0.start != NULL_TREE
              && !has_mem_ref_been_instrumented (&src0, src0_len))
            return false;

          if (src1.start != NULL_TREE
              && !has_mem_ref_been_instrumented (&src1, src1_len))
            return false;

          if (dest.start != NULL_TREE
              && !has_mem_ref_been_instrumented (&dest, dest_len))
            return false;

          return true;
        }
    }
  return false;
}

/* Insert a memory reference into the hash table.  */

static void
update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
{
  hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht->find_slot (&r, INSERT);
  if (*slot == NULL || (*slot)->access_size < access_size)
    *slot = asan_mem_ref_new (ref, access_size);
}

/* Initialize the shadow_ptr_types array.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
  shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
  shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
  shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
  initialize_sanitizer_builtins ();
}

/* Create an ADDR_EXPR of a STRING_CST holding the PP pretty printer
   text.  */

static tree
asan_pp_string (pretty_printer *pp)
{
  const char *buf = pp_formatted_text (pp);
  size_t len = strlen (buf);
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
                        build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
}

/* Return a CONST_INT representing 4 consecutive shadow memory bytes.  */

static rtx
asan_shadow_cst (unsigned char shadow_bytes[4])
{
  int i;
  unsigned HOST_WIDE_INT val = 0;
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
  for (i = 0; i < 4; i++)
    val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
           << (BITS_PER_UNIT * i);
  return gen_int_mode (val, SImode);
}
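
/* E.g. the shadow bytes {0x00, 0x00, 0x00, 0xF4} for slots 3 and 2 in
   the file-level comment pack, on a little-endian target, into the
   SImode constant 0xF4000000.  */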

/* Clear LEN bytes of shadow memory at SHADOW_MEM.  We can't emit a
   library call here though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx_insn *insn, *insns, *jump;
  rtx_code_label *top_label;
  rtx end, addr, tmp;

  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
}
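
/* When clear_storage would have resorted to a library call, the loop
   emitted above is equivalent to this sketch:

     uint32_t *p = (uint32_t *) shadow, *end = (uint32_t *) (shadow + len);
     do
       *p++ = 0;
     while (p < end);  */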

void
asan_function_start (void)
{
  section *fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
                          current_function_funcdef_no);
}

/* Insert code to protect stack vars.  The prologue sequence should be
   emitted directly, the epilogue sequence is returned.  BASE is the
   register holding the stack base, relative to which the OFFSETS
   array offsets are computed.  The OFFSETS array contains pairs of
   offsets in reverse order, always the end offset of some gap that
   needs protection followed by its starting offset, and DECLS is an
   array of representative decls for each var partition.  LENGTH is
   the length of the OFFSETS array, the DECLS array is LENGTH / 2 - 1
   elements long (OFFSETS includes the gap before the first variable
   as well as the gaps after each stack variable).  PBASE is, if
   non-NULL, some pseudo register which stack vars' DECL_RTLs are
   based on.  Either BASE should be assigned to PBASE, when not doing
   use-after-return protection, or the corresponding address based on
   the __asan_stack_malloc* return value.  */

rtx_insn *
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
                            HOST_WIDE_INT *offsets, tree *decls, int length)
{
  rtx shadow_base, shadow_mem, ret, mem, orig_base;
  rtx_code_label *lab;
  rtx_insn *insns;
  char buf[30];
  unsigned char shadow_bytes[4];
  HOST_WIDE_INT base_offset = offsets[length - 1];
  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
  HOST_WIDE_INT last_offset, last_size;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst, decl, id;
  int use_after_return_class = -1;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  /* First of all, prepare the description string.  */
  pretty_printer asan_pp;

  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
        {
          pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
          pp_space (&asan_pp);
          pp_tree_identifier (&asan_pp, DECL_NAME (decl));
        }
      else
        pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string (&asan_pp);

  /* Emit the prologue sequence.  */
  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
      && ASAN_USE_AFTER_RETURN)
    {
      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
      /* __asan_stack_malloc_N guarantees alignment
         N < 6 ? (64 << N) : 4096 bytes.  */
      if (alignb > (use_after_return_class < 6
                    ? (64U << use_after_return_class) : 4096U))
        use_after_return_class = -1;
      else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
        base_align_bias = ((asan_frame_size + alignb - 1)
                           & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
    }
  /* Align base if target is STRICT_ALIGNMENT.  */
  if (STRICT_ALIGNMENT)
    base = expand_binop (Pmode, and_optab, base,
                         gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
                                          << ASAN_SHADOW_SHIFT)
                                         / BITS_PER_UNIT), Pmode), NULL_RTX,
                         1, OPTAB_DIRECT);

  if (use_after_return_class == -1 && pbase)
    emit_move_insn (pbase, base);

  base = expand_binop (Pmode, add_optab, base,
                       gen_int_mode (base_offset - base_align_bias, Pmode),
                       NULL_RTX, 1, OPTAB_DIRECT);
  orig_base = NULL_RTX;
  if (use_after_return_class != -1)
    {
      if (asan_detect_stack_use_after_return == NULL_TREE)
        {
          id = get_identifier ("__asan_option_detect_stack_use_after_return");
          decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
                             integer_type_node);
          SET_DECL_ASSEMBLER_NAME (decl, id);
          TREE_ADDRESSABLE (decl) = 1;
          DECL_ARTIFICIAL (decl) = 1;
          DECL_IGNORED_P (decl) = 1;
          DECL_EXTERNAL (decl) = 1;
          TREE_STATIC (decl) = 1;
          TREE_PUBLIC (decl) = 1;
          TREE_USED (decl) = 1;
          asan_detect_stack_use_after_return = decl;
        }
      orig_base = gen_reg_rtx (Pmode);
      emit_move_insn (orig_base, base);
      ret = expand_normal (asan_detect_stack_use_after_return);
      lab = gen_label_rtx ();
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
                               VOIDmode, 0, lab, very_likely);
      snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
                use_after_return_class);
      ret = init_one_libfunc (buf);
      rtx addr = convert_memory_address (ptr_mode, base);
      ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
                                     GEN_INT (asan_frame_size
                                              + base_align_bias),
                                     TYPE_MODE (pointer_sized_int_node),
                                     addr, ptr_mode);
      ret = convert_memory_address (Pmode, ret);
      emit_move_insn (base, ret);
      emit_label (lab);
      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
                                           gen_int_mode (base_align_bias
                                                         - base_offset, Pmode),
                                           NULL_RTX, 1, OPTAB_DIRECT));
    }
  mem = gen_rtx_MEM (ptr_mode, base);
  mem = adjust_address (mem, VOIDmode, base_align_bias);
  emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
  id = get_identifier (buf);
  decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                     VAR_DECL, id, char_type_node);
  SET_DECL_ASSEMBLER_NAME (decl, id);
  TREE_ADDRESSABLE (decl) = 1;
  TREE_READONLY (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  TREE_USED (decl) = 1;
  DECL_INITIAL (decl) = decl;
  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (id) = 1;
  emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
  shadow_base = expand_binop (Pmode, lshr_optab, base,
                              GEN_INT (ASAN_SHADOW_SHIFT),
                              NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base
    = plus_constant (Pmode, shadow_base,
                     asan_shadow_offset ()
                     + (base_align_bias >> ASAN_SHADOW_SHIFT));
  gcc_assert (asan_shadow_set != -1
              && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
  prev_offset = base_offset;
  for (l = length; l; l -= 2)
    {
      if (l == 2)
        cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
        {
          int i;
          HOST_WIDE_INT aoff
            = base_offset + ((offset - base_offset)
                             & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
          shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                       (aoff - prev_offset)
                                       >> ASAN_SHADOW_SHIFT);
          prev_offset = aoff;
          for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
            if (aoff < offset)
              {
                if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
                  shadow_bytes[i] = 0;
                else
                  shadow_bytes[i] = offset - aoff;
              }
            else
              shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
          emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
          offset = aoff;
        }
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
        {
          shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                       (offset - prev_offset)
                                       >> ASAN_SHADOW_SHIFT);
          prev_offset = offset;
          memset (shadow_bytes, cur_shadow_byte, 4);
          emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
          offset += ASAN_RED_ZONE_SIZE;
        }
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();

  /* Construct epilogue sequence.  */
  start_sequence ();

  lab = NULL;
  if (use_after_return_class != -1)
    {
      rtx_code_label *lab2 = gen_label_rtx ();
      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
                               VOIDmode, 0, lab2, very_likely);
      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
      set_mem_alias_set (shadow_mem, asan_shadow_set);
      mem = gen_rtx_MEM (ptr_mode, base);
      mem = adjust_address (mem, VOIDmode, base_align_bias);
      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
      if (use_after_return_class < 5
          && can_store_by_pieces (sz, builtin_memset_read_str, &c,
                                  BITS_PER_UNIT, true))
        store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
                         BITS_PER_UNIT, true, 0);
      else if (use_after_return_class >= 5
               || !set_storage_via_setmem (shadow_mem,
                                           GEN_INT (sz),
                                           gen_int_mode (c, QImode),
                                           BITS_PER_UNIT, BITS_PER_UNIT,
                                           -1, sz, sz, sz))
        {
          snprintf (buf, sizeof buf, "__asan_stack_free_%d",
                    use_after_return_class);
          ret = init_one_libfunc (buf);
          rtx addr = convert_memory_address (ptr_mode, base);
          rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
          emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
                             GEN_INT (asan_frame_size + base_align_bias),
                             TYPE_MODE (pointer_sized_int_node),
                             orig_addr, ptr_mode);
        }
      lab = gen_label_rtx ();
      emit_jump (lab);
      emit_label (lab2);
    }

  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);

  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));

  prev_offset = base_offset;
  last_offset = base_offset;
  last_size = 0;
  for (l = length; l; l -= 2)
    {
      offset = base_offset + ((offsets[l - 1] - base_offset)
                              & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
      if (last_offset + last_size != offset)
        {
          shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                       (last_offset - prev_offset)
                                       >> ASAN_SHADOW_SHIFT);
          prev_offset = last_offset;
          asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
          last_offset = offset;
          last_size = 0;
        }
      last_size += base_offset + ((offsets[l - 2] - base_offset)
                                  & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
                   - offset;
    }
  if (last_size)
    {
      shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                   (last_offset - prev_offset)
                                   >> ASAN_SHADOW_SHIFT);
      asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
    }

  do_pending_stack_adjust ();
  if (lab)
    emit_label (lab);

  insns = get_insns ();
  end_sequence ();
  return insns;
}

/* Return true if DECL, a global var, might be overridden and
   therefore needs a local alias.  */

static bool
asan_needs_local_alias (tree decl)
{
  return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
}

/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */

bool
asan_protect_global (tree decl)
{
  if (!ASAN_GLOBALS)
    return false;

  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
         by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
          && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
          && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
        return false;
      return true;
    }
  if (TREE_CODE (decl) != VAR_DECL
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      || !DECL_RTL_SET_P (decl)
      /* Comdat vars pose an ABI problem, we can't know if
         the var that is selected by the linker will have
         padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.
         Note: the Linux kernel is built with -fno-common, so we do
         instrument globals there even if it is C.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect vars placed in a user section: vars placed into
         a user section from multiple TUs are often assumed to form an
         array of such vars, and putting padding in there breaks this
         assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL
          && !symtab_node::get (decl)->implicit_section
          && !section_sanitized_p (DECL_SECTION_NAME (decl)))
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
      || TREE_TYPE (decl) == ubsan_get_source_location_type ())
    return false;

  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
    return false;
  symbol = XEXP (rtl, 0);

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    return false;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

#ifndef ASM_OUTPUT_DEF
  if (asan_needs_local_alias (decl))
    return false;
#endif

  return true;
}

/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
                   int *nargs)
{
  static enum built_in_function report[2][2][6]
    = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
            BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
            BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
          { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
            BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
            BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
        { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
            BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
            BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
            BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
            BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
            BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
          { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
            BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
            BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
            BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
            BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
            BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (report[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
}
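
/* E.g. a 4-byte store with recovery disabled resolves to
   report[0][1][exact_log2 (4)], i.e. BUILT_IN_ASAN_REPORT_STORE4 with
   *NARGS == 1, while a variable-length store resolves to
   BUILT_IN_ASAN_REPORT_STORE_N with *NARGS == 2.  */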

/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
            int *nargs)
{
  static enum built_in_function check[2][2][6]
    = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
            BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
            BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
          { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
            BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
            BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
        { { BUILT_IN_ASAN_LOAD1_NOABORT,
            BUILT_IN_ASAN_LOAD2_NOABORT,
            BUILT_IN_ASAN_LOAD4_NOABORT,
            BUILT_IN_ASAN_LOAD8_NOABORT,
            BUILT_IN_ASAN_LOAD16_NOABORT,
            BUILT_IN_ASAN_LOADN_NOABORT },
          { BUILT_IN_ASAN_STORE1_NOABORT,
            BUILT_IN_ASAN_STORE2_NOABORT,
            BUILT_IN_ASAN_STORE4_NOABORT,
            BUILT_IN_ASAN_STORE8_NOABORT,
            BUILT_IN_ASAN_STORE16_NOABORT,
            BUILT_IN_ASAN_STOREN_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (check[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
}

/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement of the
   basic block *FALLTHROUGH_BLOCK.  That statement is the same as what
   ITER was pointing to prior to calling this function if BEFORE_P is
   true; otherwise, it is its following statement.  */

gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
                          bool before_p,
                          bool then_more_likely_p,
                          bool create_then_fallthru_edge,
                          basic_block *then_block,
                          basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  if (current_loops)
    {
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
      ? PROB_VERY_UNLIKELY
      : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
}

/* Insert an if condition followed by a 'then block' right before the
   statement pointed to by ITER.  The fallthrough block -- which is the
   else block of the condition as well as the destination of the
   outgoing edge of the 'then block' -- starts with the statement
   pointed to by ITER.

   COND is the condition of the if.

   If THEN_MORE_LIKELY_P is true, the probability of the edge to the
   'then block' is higher than the probability of the edge to the
   fallthrough block.

   Upon completion of the function, *THEN_BB is set to the newly
   inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
   fallthrough block.

   *ITER is adjusted to still point to the same statement it was
   pointing to initially.  */

static void
insert_if_then_before_iter (gcond *cond,
                            gimple_stmt_iterator *iter,
                            bool then_more_likely_p,
                            basic_block *then_bb,
                            basic_block *fallthrough_bb)
{
  gimple_stmt_iterator cond_insert_point =
    create_cond_insert_point (iter,
                              /*before_p=*/true,
                              then_more_likely_p,
                              /*create_then_fallthru_edge=*/true,
                              then_bb,
                              fallthrough_bb);
  gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}

/* Build
   (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().  */

static tree
build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
                         tree base_addr, tree shadow_ptr_type)
{
  tree t, uintptr_type = TREE_TYPE (base_addr);
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  gimple g;

  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
                           base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build_int_cst (uintptr_type, asan_shadow_offset ());
  g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
                           gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
                           gimple_assign_lhs (g));
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
              build_int_cst (shadow_ptr_type, 0));
  g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
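
/* The gimple sequence built above is the equivalent of:

     shadow = *(shadow_type *) ((base_addr >> ASAN_SHADOW_SHIFT)
                                + asan_shadow_offset ());  */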

/* BASE can already be an SSA_NAME; in that case, do not create a
   new SSA_NAME for it.  */

static tree
maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
                       bool before_p)
{
  if (TREE_CODE (base) == SSA_NAME)
    return base;
  gimple g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
                                  TREE_CODE (base), base);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}

/* LEN can already have the necessary size and precision;
   in that case, do not create a new variable.  */

tree
maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
                       bool before_p)
{
  if (ptrofftype_p (len))
    return len;
  gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                                  NOP_EXPR, len);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
1705
1706 /* Instrument the memory access at address BASE.  Insert new
1707 statements before or after ITER.
1708
1709 Note that the memory access represented by BASE can be either an
1710 SSA_NAME, or a non-SSA expression.  LEN is the access length; if it
1711 is NULL_TREE, SIZE_IN_BYTES must give the compile-time constant
1712 length instead.  LOCATION is the source code location.  IS_STORE is
1713 TRUE for a store, FALSE for a load.  BEFORE_P is TRUE for inserting
1714 the instrumentation code before ITER, FALSE for inserting it after
1715 ITER.  IS_SCALAR_ACCESS is TRUE for a scalar memory access and FALSE
1716 for a memory region access.  IS_NON_ZERO_LEN is TRUE if the memory
1717 region is guaranteed to have non-zero length.  ALIGN gives the
1718 alignment of the accessed memory object in bits, or 0 if it is
1719 unknown.
1720
1721 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1722 statement it was pointing to prior to calling this function,
1723 otherwise, it points to the statement logically following it. */
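
/* For example, a 4-byte aligned scalar store to p_2 is instrumented,
   roughly, with the internal call
     .ASAN_CHECK (ASAN_CHECK_STORE | ASAN_CHECK_NON_ZERO_LEN
                  | ASAN_CHECK_SCALAR_ACCESS, p_2, 4, 4);
   (flag names as in asan.h), which asan_expand_check_ifn later
   expands into an inline shadow test or a run-time callback.  */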
1724
1725 static void
1726 build_check_stmt (location_t loc, tree base, tree len,
1727 HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
1728 bool is_non_zero_len, bool before_p, bool is_store,
1729 bool is_scalar_access, unsigned int align = 0)
1730 {
1731 gimple_stmt_iterator gsi = *iter;
1732 gimple g;
1733
1734 gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
1737
1738 base = unshare_expr (base);
1739 base = maybe_create_ssa_name (loc, base, &gsi, before_p);
1740
1741 if (len)
1742 {
1743 len = unshare_expr (len);
1744 len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
1745 }
1746 else
1747 {
1748 gcc_assert (size_in_bytes != -1);
1749 len = build_int_cst (pointer_sized_int_node, size_in_bytes);
1750 }
1751
1752 if (size_in_bytes > 1)
1753 {
1754 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1755 || size_in_bytes > 16)
1756 is_scalar_access = false;
1757 else if (align && align < size_in_bytes * BITS_PER_UNIT)
1758 {
1759 /* On non-strict-alignment targets, if a
1760 16-byte access is only 8-byte aligned,
1761 this results in a misaligned 2-byte
1762 shadow memory load, but the access can
1763 otherwise be handled with one read.  */
1764 if (size_in_bytes != 16
1765 || STRICT_ALIGNMENT
1766 || align < 8 * BITS_PER_UNIT)
1767 is_scalar_access = false;
1768 }
1769 }
1770
1771 HOST_WIDE_INT flags = 0;
1772 if (is_store)
1773 flags |= ASAN_CHECK_STORE;
1774 if (is_non_zero_len)
1775 flags |= ASAN_CHECK_NON_ZERO_LEN;
1776 if (is_scalar_access)
1777 flags |= ASAN_CHECK_SCALAR_ACCESS;
1778
1779 g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
1780 build_int_cst (integer_type_node, flags),
1781 base, len,
1782 build_int_cst (integer_type_node,
1783 align / BITS_PER_UNIT));
1784 gimple_set_location (g, loc);
1785 if (before_p)
1786 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1787 else
1788 {
1789 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1790 gsi_next (&gsi);
1791 *iter = gsi;
1792 }
1793 }
1794
1795 /* If T represents a memory access, add instrumentation code before ITER.
1796 LOCATION is source code location.
1797 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
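
/* E.g. for 'tmp = a[i];' where a[i] is a 4-byte int, this inserts a
   4-byte scalar check of &a[i] before the load -- an illustrative
   sketch; bit-field and under-aligned accesses take the special
   paths below.  */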
1798
1799 static void
1800 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1801 location_t location, bool is_store)
1802 {
1803 if (is_store && !ASAN_INSTRUMENT_WRITES)
1804 return;
1805 if (!is_store && !ASAN_INSTRUMENT_READS)
1806 return;
1807
1808 tree type, base;
1809 HOST_WIDE_INT size_in_bytes;
1810
1811 type = TREE_TYPE (t);
1812 switch (TREE_CODE (t))
1813 {
1814 case ARRAY_REF:
1815 case COMPONENT_REF:
1816 case INDIRECT_REF:
1817 case MEM_REF:
1818 case VAR_DECL:
1819 case BIT_FIELD_REF:
1820 break;
1822 default:
1823 return;
1824 }
1825
1826 size_in_bytes = int_size_in_bytes (type);
1827 if (size_in_bytes <= 0)
1828 return;
1829
1830 HOST_WIDE_INT bitsize, bitpos;
1831 tree offset;
1832 machine_mode mode;
1833 int volatilep = 0, unsignedp = 0;
1834 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
1835 &mode, &unsignedp, &volatilep, false);
1836
1837 if (TREE_CODE (t) == COMPONENT_REF
1838 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1839 {
1840 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1841 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1842 TREE_OPERAND (t, 0), repr,
1843 NULL_TREE), location, is_store);
1844 return;
1845 }
1846
1847 if (bitpos % BITS_PER_UNIT
1848 || bitsize != size_in_bytes * BITS_PER_UNIT)
1849 return;
1850
1851 if (TREE_CODE (inner) == VAR_DECL
1852 && offset == NULL_TREE
1853 && bitpos >= 0
1854 && DECL_SIZE (inner)
1855 && tree_fits_shwi_p (DECL_SIZE (inner))
1856 && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
1857 {
1858 if (DECL_THREAD_LOCAL_P (inner))
1859 return;
1860 if (!ASAN_GLOBALS && is_global_var (inner))
1861 return;
1862 if (!TREE_STATIC (inner))
1863 {
1864 /* Automatic vars in the current function will always be
1865 accessible.  */
1866 if (decl_function_context (inner) == current_function_decl)
1867 return;
1868 }
1869 /* Always instrument external vars, as they might be dynamically
1870 initialized.  */
1871 else if (!DECL_EXTERNAL (inner))
1872 {
1873 /* Static vars that are known not to be dynamically
1874 initialized will always be accessible.  */
1875 varpool_node *vnode = varpool_node::get (inner);
1876 if (vnode && !vnode->dynamically_initialized)
1877 return;
1878 }
1879 }
1880
1881 base = build_fold_addr_expr (t);
1882 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1883 {
1884 unsigned int align = get_object_alignment (t);
1885 build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
1886 /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
1887 is_store, /*is_scalar_access*/true, align);
1888 update_mem_ref_hash_table (base, size_in_bytes);
1889 update_mem_ref_hash_table (t, size_in_bytes);
1890 }
1892 }
1893
1894 /* Insert a memory reference into the hash table if the access
1895 length can be determined at compile time.  */
1896
1897 static void
1898 maybe_update_mem_ref_hash_table (tree base, tree len)
1899 {
1900 if (!POINTER_TYPE_P (TREE_TYPE (base))
1901 || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
1902 return;
1903
1904 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1905
1906 if (size_in_bytes != -1)
1907 update_mem_ref_hash_table (base, size_in_bytes);
1908 }
1909
1910 /* Instrument an access to a contiguous memory region that starts at
1911 the address pointed to by BASE, over a length of LEN (expressed in
1912 units of sizeof (*BASE)).  ITER points to the instruction before
1913 which the instrumentation instructions must be inserted. LOCATION
1914 is the source location that the instrumentation instructions must
1915 have. If IS_STORE is true, then the memory access is a store;
1916 otherwise, it's a load. */
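
/* E.g. for __builtin_memset (p, 0, n) this checks the region
   [p, p + n): with a variable N it emits a non-scalar .ASAN_CHECK of
   length N, which asan_expand_check_ifn expands into checks of the
   first and the last byte of the region.  */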
1917
1918 static void
1919 instrument_mem_region_access (tree base, tree len,
1920 gimple_stmt_iterator *iter,
1921 location_t location, bool is_store)
1922 {
1923 if (!POINTER_TYPE_P (TREE_TYPE (base))
1924 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1925 || integer_zerop (len))
1926 return;
1927
1928 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1929
1930 if ((size_in_bytes == -1)
1931 || !has_mem_ref_been_instrumented (base, size_in_bytes))
1932 {
1933 build_check_stmt (location, base, len, size_in_bytes, iter,
1934 /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
1935 is_store, /*is_scalar_access*/false, /*align*/0);
1936 }
1937
1938 maybe_update_mem_ref_hash_table (base, len);
1939 *iter = gsi_for_stmt (gsi_stmt (*iter));
1940 }
1941
1942 /* Instrument the call to a built-in memory access function that is
1943 pointed to by the iterator ITER.
1944
1945 Upon completion, return TRUE iff *ITER has been advanced to the
1946 statement following the one it was originally pointing to. */
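
/* E.g. for __builtin_memcpy (d, s, n) the region starting at S is
   treated as a read of length N and the region starting at D as a
   write of length N; if the run-time library intercepts the function
   itself, only the mem-ref hash table is updated.  */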
1947
1948 static bool
1949 instrument_builtin_call (gimple_stmt_iterator *iter)
1950 {
1951 if (!ASAN_MEMINTRIN)
1952 return false;
1953
1954 bool iter_advanced_p = false;
1955 gcall *call = as_a <gcall *> (gsi_stmt (*iter));
1956
1957 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
1958
1959 location_t loc = gimple_location (call);
1960
1961 asan_mem_ref src0, src1, dest;
1962 asan_mem_ref_init (&src0, NULL, 1);
1963 asan_mem_ref_init (&src1, NULL, 1);
1964 asan_mem_ref_init (&dest, NULL, 1);
1965
1966 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1967 bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
1968 dest_is_deref = false, intercepted_p = true;
1969
1970 if (get_mem_refs_of_builtin_call (call,
1971 &src0, &src0_len, &src0_is_store,
1972 &src1, &src1_len, &src1_is_store,
1973 &dest, &dest_len, &dest_is_store,
1974 &dest_is_deref, &intercepted_p))
1975 {
1976 if (dest_is_deref)
1977 {
1978 instrument_derefs (iter, dest.start, loc, dest_is_store);
1979 gsi_next (iter);
1980 iter_advanced_p = true;
1981 }
1982 else if (!intercepted_p
1983 && (src0_len || src1_len || dest_len))
1984 {
1985 if (src0.start != NULL_TREE)
1986 instrument_mem_region_access (src0.start, src0_len,
1987 iter, loc, /*is_store=*/false);
1988 if (src1.start != NULL_TREE)
1989 instrument_mem_region_access (src1.start, src1_len,
1990 iter, loc, /*is_store=*/false);
1991 if (dest.start != NULL_TREE)
1992 instrument_mem_region_access (dest.start, dest_len,
1993 iter, loc, /*is_store=*/true);
1994
1995 *iter = gsi_for_stmt (call);
1996 gsi_next (iter);
1997 iter_advanced_p = true;
1998 }
1999 else
2000 {
2001 if (src0.start != NULL_TREE)
2002 maybe_update_mem_ref_hash_table (src0.start, src0_len);
2003 if (src1.start != NULL_TREE)
2004 maybe_update_mem_ref_hash_table (src1.start, src1_len);
2005 if (dest.start != NULL_TREE)
2006 maybe_update_mem_ref_hash_table (dest.start, dest_len);
2007 }
2008 }
2009 return iter_advanced_p;
2010 }
2011
2012 /* Instrument the assignment statement ITER if it is subject to
2013 instrumentation. Return TRUE iff instrumentation actually
2014 happened. In that case, the iterator ITER is advanced to the next
2015 logical expression following the one initially pointed to by ITER,
2016 and the relevant memory reference whose access has been
2017 instrumented is added to the memory references hash table.  */
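
/* E.g. '*p_1 = q_2->f;' is both a store and a load: the write
   through p_1 and the read of q_2->f each get a check inserted
   before the statement.  */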
2018
2019 static bool
2020 maybe_instrument_assignment (gimple_stmt_iterator *iter)
2021 {
2022 gimple s = gsi_stmt (*iter);
2023
2024 gcc_assert (gimple_assign_single_p (s));
2025
2026 tree ref_expr = NULL_TREE;
2027 bool is_store, is_instrumented = false;
2028
2029 if (gimple_store_p (s))
2030 {
2031 ref_expr = gimple_assign_lhs (s);
2032 is_store = true;
2033 instrument_derefs (iter, ref_expr,
2034 gimple_location (s),
2035 is_store);
2036 is_instrumented = true;
2037 }
2038
2039 if (gimple_assign_load_p (s))
2040 {
2041 ref_expr = gimple_assign_rhs1 (s);
2042 is_store = false;
2043 instrument_derefs (iter, ref_expr,
2044 gimple_location (s),
2045 is_store);
2046 is_instrumented = true;
2047 }
2048
2049 if (is_instrumented)
2050 gsi_next (iter);
2051
2052 return is_instrumented;
2053 }
2054
2055 /* Instrument the function call pointed to by the iterator ITER, if it
2056 is subject to instrumentation. At the moment, the only function
2057 calls that are instrumented are some built-in functions that access
2058 memory. Look at instrument_builtin_call to learn more.
2059
2060 Upon completion return TRUE iff *ITER was advanced to the statement
2061 following the one it was originally pointing to. */
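
/* E.g. before a noreturn call such as 'exit (1);' a call to
   __asan_handle_no_return () is inserted so the run-time can
   unpoison the whole stack first; BUILT_IN_UNREACHABLE and
   BUILT_IN_TRAP are deliberately left uninstrumented.  */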
2062
2063 static bool
2064 maybe_instrument_call (gimple_stmt_iterator *iter)
2065 {
2066 gimple stmt = gsi_stmt (*iter);
2067 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2068
2069 if (is_builtin && instrument_builtin_call (iter))
2070 return true;
2071
2072 if (gimple_call_noreturn_p (stmt))
2073 {
2074 if (is_builtin)
2075 {
2076 tree callee = gimple_call_fndecl (stmt);
2077 switch (DECL_FUNCTION_CODE (callee))
2078 {
2079 case BUILT_IN_UNREACHABLE:
2080 case BUILT_IN_TRAP:
2081 /* Don't instrument these. */
2082 return false;
2083 default:
2084 break;
2085 }
2086 }
2087 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2088 gimple g = gimple_build_call (decl, 0);
2089 gimple_set_location (g, gimple_location (stmt));
2090 gsi_insert_before (iter, g, GSI_SAME_STMT);
2091 }
2092 return false;
2093 }
2094
2095 /* Walk each instruction in every basic block and instrument those that
2096 represent memory references: loads, stores, or function calls.
2097 In a given basic block, this function avoids instrumenting memory
2098 references that have already been instrumented. */
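
/* E.g. two identical 4-byte loads from the same address within one
   extended basic block yield a single check: the second load is
   found in the mem-ref hash table and skipped.  */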
2099
2100 static void
2101 transform_statements (void)
2102 {
2103 basic_block bb, last_bb = NULL;
2104 gimple_stmt_iterator i;
2105 int saved_last_basic_block = last_basic_block_for_fn (cfun);
2106
2107 FOR_EACH_BB_FN (bb, cfun)
2108 {
2109 basic_block prev_bb = bb;
2110
2111 if (bb->index >= saved_last_basic_block) continue;
2112
2113 /* Flush the mem ref hash table, if current bb doesn't have
2114 exactly one predecessor, or if that predecessor (skipping
2115 over asan created basic blocks) isn't the last processed
2116 basic block. Thus we effectively flush on extended basic
2117 block boundaries. */
2118 while (single_pred_p (prev_bb))
2119 {
2120 prev_bb = single_pred (prev_bb);
2121 if (prev_bb->index < saved_last_basic_block)
2122 break;
2123 }
2124 if (prev_bb != last_bb)
2125 empty_mem_ref_hash_table ();
2126 last_bb = bb;
2127
2128 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2129 {
2130 gimple s = gsi_stmt (i);
2131
2132 if (has_stmt_been_instrumented_p (s))
2133 gsi_next (&i);
2134 else if (gimple_assign_single_p (s)
2135 && !gimple_clobber_p (s)
2136 && maybe_instrument_assignment (&i))
2137 /* Nothing to do as maybe_instrument_assignment advanced
2138 the iterator I. */;
2139 else if (is_gimple_call (s) && maybe_instrument_call (&i))
2140 /* Nothing to do as maybe_instrument_call
2141 advanced the iterator I. */;
2142 else
2143 {
2144 /* No instrumentation happened.
2145
2146 If the current instruction is a function call that
2147 might free something, let's forget about the memory
2148 references that got instrumented. Otherwise we might
2149 miss some instrumentation opportunities. */
2150 if (is_gimple_call (s) && !nonfreeing_call_p (s))
2151 empty_mem_ref_hash_table ();
2152
2153 gsi_next (&i);
2154 }
2155 }
2156 }
2157 free_mem_ref_resources ();
2158 }
2159
2160 /* Build
2161 __asan_before_dynamic_init (module_name)
2162 or
2163 __asan_after_dynamic_init ()
2164 call. */
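
/* These calls bracket the dynamic initialization of a module's
   globals in its static constructor, roughly:
     __asan_before_dynamic_init ("<module name>");
     ... run the dynamic initializers ...
     __asan_after_dynamic_init ();
   letting the run-time detect initialization-order bugs.  */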
2165
2166 tree
2167 asan_dynamic_init_call (bool after_p)
2168 {
2169 tree fn = builtin_decl_implicit (after_p
2170 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2171 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2172 tree module_name_cst = NULL_TREE;
2173 if (!after_p)
2174 {
2175 pretty_printer module_name_pp;
2176 pp_string (&module_name_pp, main_input_filename);
2177
2178 if (shadow_ptr_types[0] == NULL_TREE)
2179 asan_init_shadow_ptr_types ();
2180 module_name_cst = asan_pp_string (&module_name_pp);
2181 module_name_cst = fold_convert (const_ptr_type_node,
2182 module_name_cst);
2183 }
2184
2185 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2186 }
2187
2188 /* Build
2189 struct __asan_global
2190 {
2191 const void *__beg;
2192 uptr __size;
2193 uptr __size_with_redzone;
2194 const void *__name;
2195 const void *__module_name;
2196 uptr __has_dynamic_init;
2197 __asan_global_source_location *__location;
2198 } type. */
2199
2200 static tree
2201 asan_global_struct (void)
2202 {
2203 static const char *field_names[7]
2204 = { "__beg", "__size", "__size_with_redzone",
2205 "__name", "__module_name", "__has_dynamic_init", "__location"};
2206 tree fields[7], ret;
2207 int i;
2208
2209 ret = make_node (RECORD_TYPE);
2210 for (i = 0; i < 7; i++)
2211 {
2212 fields[i]
2213 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2214 get_identifier (field_names[i]),
2215 (i == 0 || i == 3) ? const_ptr_type_node
2216 : pointer_sized_int_node);
2217 DECL_CONTEXT (fields[i]) = ret;
2218 if (i)
2219 DECL_CHAIN (fields[i - 1]) = fields[i];
2220 }
2221 tree type_decl = build_decl (input_location, TYPE_DECL,
2222 get_identifier ("__asan_global"), ret);
2223 DECL_IGNORED_P (type_decl) = 1;
2224 DECL_ARTIFICIAL (type_decl) = 1;
2225 TYPE_FIELDS (ret) = fields[0];
2226 TYPE_NAME (ret) = type_decl;
2227 TYPE_STUB_DECL (ret) = type_decl;
2228 layout_type (ret);
2229 return ret;
2230 }
2231
2232 /* Append description of a single global DECL into vector V.
2233 TYPE is the __asan_global struct type, as returned by asan_global_struct.  */
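
/* E.g. for 'int g;' defined in foo.cc the appended record is roughly
     { &g, 4, 64, "g", "foo.cc", 0, &.LASANLOC1 }
   -- an illustrative sketch: 64 is the 4 bytes of data plus the
   trailing red zone as computed by asan_red_zone_size.  */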
2234
2235 static void
2236 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2237 {
2238 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2239 unsigned HOST_WIDE_INT size;
2240 tree str_cst, module_name_cst, refdecl = decl;
2241 vec<constructor_elt, va_gc> *vinner = NULL;
2242
2243 pretty_printer asan_pp, module_name_pp;
2244
2245 if (DECL_NAME (decl))
2246 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2247 else
2248 pp_string (&asan_pp, "<unknown>");
2249 str_cst = asan_pp_string (&asan_pp);
2250
2251 pp_string (&module_name_pp, main_input_filename);
2252 module_name_cst = asan_pp_string (&module_name_pp);
2253
2254 if (asan_needs_local_alias (decl))
2255 {
2256 char buf[20];
2257 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2258 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2259 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2260 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2261 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2262 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2263 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2264 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2265 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2266 TREE_STATIC (refdecl) = 1;
2267 TREE_PUBLIC (refdecl) = 0;
2268 TREE_USED (refdecl) = 1;
2269 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2270 }
2271
2272 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2273 fold_convert (const_ptr_type_node,
2274 build_fold_addr_expr (refdecl)));
2275 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2276 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2277 size += asan_red_zone_size (size);
2278 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2279 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2280 fold_convert (const_ptr_type_node, str_cst));
2281 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2282 fold_convert (const_ptr_type_node, module_name_cst));
2283 varpool_node *vnode = varpool_node::get (decl);
2284 int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2285 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2286 build_int_cst (uptr, has_dynamic_init));
2287 tree locptr = NULL_TREE;
2288 location_t loc = DECL_SOURCE_LOCATION (decl);
2289 expanded_location xloc = expand_location (loc);
2290 if (xloc.file != NULL)
2291 {
2292 static int lasanloccnt = 0;
2293 char buf[25];
2294 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2295 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2296 ubsan_get_source_location_type ());
2297 TREE_STATIC (var) = 1;
2298 TREE_PUBLIC (var) = 0;
2299 DECL_ARTIFICIAL (var) = 1;
2300 DECL_IGNORED_P (var) = 1;
2301 pretty_printer filename_pp;
2302 pp_string (&filename_pp, xloc.file);
2303 tree str = asan_pp_string (&filename_pp);
2304 tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2305 NULL_TREE, str, NULL_TREE,
2306 build_int_cst (unsigned_type_node,
2307 xloc.line), NULL_TREE,
2308 build_int_cst (unsigned_type_node,
2309 xloc.column));
2310 TREE_CONSTANT (ctor) = 1;
2311 TREE_STATIC (ctor) = 1;
2312 DECL_INITIAL (var) = ctor;
2313 varpool_node::finalize_decl (var);
2314 locptr = fold_convert (uptr, build_fold_addr_expr (var));
2315 }
2316 else
2317 locptr = build_int_cst (uptr, 0);
2318 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2319 init = build_constructor (type, vinner);
2320 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2321 }
2322
2323 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2324 void
2325 initialize_sanitizer_builtins (void)
2326 {
2327 tree decl;
2328
2329 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2330 return;
2331
2332 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2333 tree BT_FN_VOID_PTR
2334 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2335 tree BT_FN_VOID_CONST_PTR
2336 = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2337 tree BT_FN_VOID_PTR_PTR
2338 = build_function_type_list (void_type_node, ptr_type_node,
2339 ptr_type_node, NULL_TREE);
2340 tree BT_FN_VOID_PTR_PTR_PTR
2341 = build_function_type_list (void_type_node, ptr_type_node,
2342 ptr_type_node, ptr_type_node, NULL_TREE);
2343 tree BT_FN_VOID_PTR_PTRMODE
2344 = build_function_type_list (void_type_node, ptr_type_node,
2345 pointer_sized_int_node, NULL_TREE);
2346 tree BT_FN_VOID_INT
2347 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2348 tree BT_FN_SIZE_CONST_PTR_INT
2349 = build_function_type_list (size_type_node, const_ptr_type_node,
2350 integer_type_node, NULL_TREE);
2351 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2352 tree BT_FN_IX_CONST_VPTR_INT[5];
2353 tree BT_FN_IX_VPTR_IX_INT[5];
2354 tree BT_FN_VOID_VPTR_IX_INT[5];
2355 tree vptr
2356 = build_pointer_type (build_qualified_type (void_type_node,
2357 TYPE_QUAL_VOLATILE));
2358 tree cvptr
2359 = build_pointer_type (build_qualified_type (void_type_node,
2360 TYPE_QUAL_VOLATILE
2361 |TYPE_QUAL_CONST));
2362 tree boolt
2363 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2364 int i;
2365 for (i = 0; i < 5; i++)
2366 {
2367 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2368 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2369 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2370 integer_type_node, integer_type_node,
2371 NULL_TREE);
2372 BT_FN_IX_CONST_VPTR_INT[i]
2373 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2374 BT_FN_IX_VPTR_IX_INT[i]
2375 = build_function_type_list (ix, vptr, ix, integer_type_node,
2376 NULL_TREE);
2377 BT_FN_VOID_VPTR_IX_INT[i]
2378 = build_function_type_list (void_type_node, vptr, ix,
2379 integer_type_node, NULL_TREE);
2380 }
2381 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2382 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2383 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2384 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2385 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2386 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2387 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2388 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2389 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2390 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2391 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2392 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2393 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2394 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2395 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2396 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2397 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2398 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2399 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2400 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2401 #undef ATTR_NOTHROW_LEAF_LIST
2402 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2403 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2404 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2405 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2406 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2407 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2408 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2409 ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
2410 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2411 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2412 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2413 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2414 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2415 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2416 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2417 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2418 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2419 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2420 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2421 /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2422 #undef ATTR_PURE_NOTHROW_LEAF_LIST
2423 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
2424 #undef DEF_SANITIZER_BUILTIN
2425 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2426 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2427 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2428 set_call_expr_flags (decl, ATTRS); \
2429 set_builtin_decl (ENUM, decl, true);
2430
2431 #include "sanitizer.def"
2432
2433 /* -fsanitize=object-size uses __builtin_object_size, but that might
2434 not be available for e.g. Fortran at this point. We use
2435 DEF_SANITIZER_BUILTIN here only as a convenience macro. */
2436 if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
2437 && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
2438 DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
2439 BT_FN_SIZE_CONST_PTR_INT,
2440 ATTR_PURE_NOTHROW_LEAF_LIST)
2441
2442 #undef DEF_SANITIZER_BUILTIN
2443 }
2444
2445 /* Called via hash_table::traverse.  Count the number of emitted
2446 STRING_CSTs in the constant hash table.  */
2447
2448 int
2449 count_string_csts (constant_descriptor_tree **slot,
2450 unsigned HOST_WIDE_INT *data)
2451 {
2452 struct constant_descriptor_tree *desc = *slot;
2453 if (TREE_CODE (desc->value) == STRING_CST
2454 && TREE_ASM_WRITTEN (desc->value)
2455 && asan_protect_global (desc->value))
2456 ++*data;
2457 return 1;
2458 }
2459
2460 /* Helper structure to pass two parameters to
2461 add_string_csts. */
2462
2463 struct asan_add_string_csts_data
2464 {
2465 tree type;
2466 vec<constructor_elt, va_gc> *v;
2467 };
2468
2469 /* Called via hash_table::traverse. Call asan_add_global
2470 on emitted STRING_CSTs from the constant hash table. */
2471
2472 int
2473 add_string_csts (constant_descriptor_tree **slot,
2474 asan_add_string_csts_data *aascd)
2475 {
2476 struct constant_descriptor_tree *desc = *slot;
2477 if (TREE_CODE (desc->value) == STRING_CST
2478 && TREE_ASM_WRITTEN (desc->value)
2479 && asan_protect_global (desc->value))
2480 {
2481 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2482 aascd->type, aascd->v);
2483 }
2484 return 1;
2485 }
2486
2487 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2488 invoke ggc_collect. */
2489 static GTY(()) tree asan_ctor_statements;
2490
2491 /* Module-level instrumentation.
2492 - Insert __asan_init_vN() into the list of CTORs.
2493 - TODO: insert redzones around globals.
2494 */
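
/* For a module with instrumented globals this emits, roughly, a
   constructor containing
     __asan_init_vN ();
     __asan_register_globals (&.LASAN0, <global count>);
   and a matching destructor calling __asan_unregister_globals,
   with the priorities selected below.  */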
2495
2496 void
2497 asan_finish_file (void)
2498 {
2499 varpool_node *vnode;
2500 unsigned HOST_WIDE_INT gcount = 0;
2501
2502 if (shadow_ptr_types[0] == NULL_TREE)
2503 asan_init_shadow_ptr_types ();
2504 /* Avoid instrumenting code in the asan ctors/dtors.
2505 We don't need to insert padding after the description strings,
2506 nor after the .LASAN* array.  */
2507 flag_sanitize &= ~SANITIZE_ADDRESS;
2508
2509 /* For user-space we want the asan constructors to run first.
2510 The Linux kernel does not support priorities other than the default,
2511 and its only other user of constructors is coverage, so there we run
2512 with the default priority.  */
2513 int priority = flag_sanitize & SANITIZE_USER_ADDRESS
2514 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
2515
2516 if (flag_sanitize & SANITIZE_USER_ADDRESS)
2517 {
2518 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2519 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2520 }
2521 FOR_EACH_DEFINED_VARIABLE (vnode)
2522 if (TREE_ASM_WRITTEN (vnode->decl)
2523 && asan_protect_global (vnode->decl))
2524 ++gcount;
2525 hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
2526 const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
2527 (&gcount);
2528 if (gcount)
2529 {
2530 tree type = asan_global_struct (), var, ctor;
2531 tree dtor_statements = NULL_TREE;
2532 vec<constructor_elt, va_gc> *v;
2533 char buf[20];
2534
2535 type = build_array_type_nelts (type, gcount);
2536 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2537 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2538 type);
2539 TREE_STATIC (var) = 1;
2540 TREE_PUBLIC (var) = 0;
2541 DECL_ARTIFICIAL (var) = 1;
2542 DECL_IGNORED_P (var) = 1;
2543 vec_alloc (v, gcount);
2544 FOR_EACH_DEFINED_VARIABLE (vnode)
2545 if (TREE_ASM_WRITTEN (vnode->decl)
2546 && asan_protect_global (vnode->decl))
2547 asan_add_global (vnode->decl, TREE_TYPE (type), v);
2548 struct asan_add_string_csts_data aascd;
2549 aascd.type = TREE_TYPE (type);
2550 aascd.v = v;
2551 const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
2552 (&aascd);
2553 ctor = build_constructor (type, v);
2554 TREE_CONSTANT (ctor) = 1;
2555 TREE_STATIC (ctor) = 1;
2556 DECL_INITIAL (var) = ctor;
2557 varpool_node::finalize_decl (var);
2558
2559 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2560 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
2561 append_to_statement_list (build_call_expr (fn, 2,
2562 build_fold_addr_expr (var),
2563 gcount_tree),
2564 &asan_ctor_statements);
2565
2566 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2567 append_to_statement_list (build_call_expr (fn, 2,
2568 build_fold_addr_expr (var),
2569 gcount_tree),
2570 &dtor_statements);
2571 cgraph_build_static_cdtor ('D', dtor_statements, priority);
2572 }
2573 if (asan_ctor_statements)
2574 cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
2575 flag_sanitize |= SANITIZE_ADDRESS;
2576 }
2577
2578 /* Expand the IFN_ASAN_CHECK internal call pointed to by ITER into an inline shadow memory check, or into a run-time callback when USE_CALLS is true.  */
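
/* When USE_CALLS is true (see --param
   asan-instrumentation-with-call-threshold), the check degenerates
   into a single run-time call -- e.g. __asan_store4 (base_addr) for
   a known 4-byte store, or __asan_loadN (base_addr, len) for a load
   of unknown size -- instead of the inline shadow test built
   below.  */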
2579
2580 bool
2581 asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
2582 {
2583 gimple g = gsi_stmt (*iter);
2584 location_t loc = gimple_location (g);
2585
2586 bool recover_p
2587 = (flag_sanitize & flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
2588
2589 HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
2590 gcc_assert (flags < ASAN_CHECK_LAST);
2591 bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
2592 bool is_store = (flags & ASAN_CHECK_STORE) != 0;
2593 bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
2594
2595 tree base = gimple_call_arg (g, 1);
2596 tree len = gimple_call_arg (g, 2);
2597 HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
2598
2599 HOST_WIDE_INT size_in_bytes
2600 = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2601
2602 if (use_calls)
2603 {
2604 /* Instrument using callbacks. */
2605 gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2606 NOP_EXPR, base);
2607 gimple_set_location (g, loc);
2608 gsi_insert_before (iter, g, GSI_SAME_STMT);
2609 tree base_addr = gimple_assign_lhs (g);
2610
2611 int nargs;
2612 tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
2613 if (nargs == 1)
2614 g = gimple_build_call (fun, 1, base_addr);
2615 else
2616 {
2617 gcc_assert (nargs == 2);
2618 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2619 NOP_EXPR, len);
2620 gimple_set_location (g, loc);
2621 gsi_insert_before (iter, g, GSI_SAME_STMT);
2622 tree sz_arg = gimple_assign_lhs (g);
2623 g = gimple_build_call (fun, nargs, base_addr, sz_arg);
2624 }
2625 gimple_set_location (g, loc);
2626 gsi_replace (iter, g, false);
2627 return false;
2628 }
2629
2630 HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
2631
2632 tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
2633 tree shadow_type = TREE_TYPE (shadow_ptr_type);
2634
2635 gimple_stmt_iterator gsi = *iter;
2636
2637 if (!is_non_zero_len)
2638 {
2639 /* The length of the memory area to asan-protect is not known to
2640 be non-zero.  Let's guard the generated instrumentation code
2641 like:
2642
2643 if (len != 0)
2644 {
2645 //asan instrumentation code goes here.
2646 }
2647 // fallthrough instructions, starting with *ITER.  */
2648
2649 g = gimple_build_cond (NE_EXPR,
2650 len,
2651 build_int_cst (TREE_TYPE (len), 0),
2652 NULL_TREE, NULL_TREE);
2653 gimple_set_location (g, loc);
2654
2655 basic_block then_bb, fallthrough_bb;
2656 insert_if_then_before_iter (as_a <gcond *> (g), iter,
2657 /*then_more_likely_p=*/true,
2658 &then_bb, &fallthrough_bb);
2659 /* Note that fallthrough_bb starts with the statement that was
2660 pointed to by ITER. */
2661
2662 /* The 'then block' of the 'if (len != 0)' condition is where
2663 we'll generate the asan instrumentation code now. */
2664 gsi = gsi_last_bb (then_bb);
2665 }
2666
2667 /* Get an iterator on the point where we can add the condition
2668 statement for the instrumentation. */
2669 basic_block then_bb, else_bb;
2670 gsi = create_cond_insert_point (&gsi, /*before_p*/false,
2671 /*then_more_likely_p=*/false,
2672 /*create_then_fallthru_edge*/recover_p,
2673 &then_bb,
2674 &else_bb);
2675
2676 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2677 NOP_EXPR, base);
2678 gimple_set_location (g, loc);
2679 gsi_insert_before (&gsi, g, GSI_NEW_STMT);
2680 tree base_addr = gimple_assign_lhs (g);
2681
2682 tree t = NULL_TREE;
2683 if (real_size_in_bytes >= 8)
2684 {
2685 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2686 shadow_ptr_type);
2687 t = shadow;
2688 }
2689 else
2690 {
2691 /* Slow path for 1, 2 and 4 byte accesses. */
2692 /* Test (shadow != 0)
2693 & (((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow).  */
2694 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2695 shadow_ptr_type);
2696 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2697 gimple_seq seq = NULL;
2698 gimple_seq_add_stmt (&seq, shadow_test);
2699 /* Accesses aligned to at least 8 bytes can test just
2700 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
2701 to be 0.  */
2702 if (align < 8)
2703 {
2704 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2705 base_addr, 7));
2706 gimple_seq_add_stmt (&seq,
2707 build_type_cast (shadow_type,
2708 gimple_seq_last (seq)));
2709 if (real_size_in_bytes > 1)
2710 gimple_seq_add_stmt (&seq,
2711 build_assign (PLUS_EXPR,
2712 gimple_seq_last (seq),
2713 real_size_in_bytes - 1));
2714 t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
2715 }
2716 else
2717 t = build_int_cst (shadow_type, real_size_in_bytes - 1);
2718 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
2719 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2720 gimple_seq_last (seq)));
2721 t = gimple_assign_lhs (gimple_seq_last (seq));
2722 gimple_seq_set_location (seq, loc);
2723 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2724
2725 /* For non-constant, misaligned or otherwise weird access sizes,
2726 check first and last byte. */
2727 if (size_in_bytes == -1)
2728 {
2729 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2730 MINUS_EXPR, len,
2731 build_int_cst (pointer_sized_int_node, 1));
2732 gimple_set_location (g, loc);
2733 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2734 tree last = gimple_assign_lhs (g);
2735 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2736 PLUS_EXPR, base_addr, last);
2737 gimple_set_location (g, loc);
2738 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2739 tree base_end_addr = gimple_assign_lhs (g);
2740
2741 tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
2742 shadow_ptr_type);
2743 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2744 gimple_seq seq = NULL;
2745 gimple_seq_add_stmt (&seq, shadow_test);
2746 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2747 base_end_addr, 7));
2748 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
2749 gimple_seq_last (seq)));
2750 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
2751 gimple_seq_last (seq),
2752 shadow));
2753 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2754 gimple_seq_last (seq)));
2755 gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
2756 gimple_seq_last (seq)));
2757 t = gimple_assign_lhs (gimple_seq_last (seq));
2758 gimple_seq_set_location (seq, loc);
2759 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2760 }
2761 }
2762
2763 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
2764 NULL_TREE, NULL_TREE);
2765 gimple_set_location (g, loc);
2766 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2767
2768 /* Generate call to the run-time library (e.g. __asan_report_load8). */
2769 gsi = gsi_start_bb (then_bb);
2770 int nargs;
2771 tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
2772 g = gimple_build_call (fun, nargs, base_addr, len);
2773 gimple_set_location (g, loc);
2774 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2775
2776 gsi_remove (iter, true);
2777 *iter = gsi_start_bb (else_bb);
2778
2779 return true;
2780 }
2781
2782 /* Instrument the current function. */
2783
2784 static unsigned int
2785 asan_instrument (void)
2786 {
2787 if (shadow_ptr_types[0] == NULL_TREE)
2788 asan_init_shadow_ptr_types ();
2789 transform_statements ();
2790 return 0;
2791 }
2792
2793 static bool
2794 gate_asan (void)
2795 {
2796 return (flag_sanitize & SANITIZE_ADDRESS) != 0
2797 && !lookup_attribute ("no_sanitize_address",
2798 DECL_ATTRIBUTES (current_function_decl));
2799 }
2800
2801 namespace {
2802
2803 const pass_data pass_data_asan =
2804 {
2805 GIMPLE_PASS, /* type */
2806 "asan", /* name */
2807 OPTGROUP_NONE, /* optinfo_flags */
2808 TV_NONE, /* tv_id */
2809 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2810 0, /* properties_provided */
2811 0, /* properties_destroyed */
2812 0, /* todo_flags_start */
2813 TODO_update_ssa, /* todo_flags_finish */
2814 };
2815
2816 class pass_asan : public gimple_opt_pass
2817 {
2818 public:
2819 pass_asan (gcc::context *ctxt)
2820 : gimple_opt_pass (pass_data_asan, ctxt)
2821 {}
2822
2823 /* opt_pass methods: */
2824 opt_pass * clone () { return new pass_asan (m_ctxt); }
2825 virtual bool gate (function *) { return gate_asan (); }
2826 virtual unsigned int execute (function *) { return asan_instrument (); }
2827
2828 }; // class pass_asan
2829
2830 } // anon namespace
2831
2832 gimple_opt_pass *
2833 make_pass_asan (gcc::context *ctxt)
2834 {
2835 return new pass_asan (ctxt);
2836 }
2837
2838 namespace {
2839
2840 const pass_data pass_data_asan_O0 =
2841 {
2842 GIMPLE_PASS, /* type */
2843 "asan0", /* name */
2844 OPTGROUP_NONE, /* optinfo_flags */
2845 TV_NONE, /* tv_id */
2846 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2847 0, /* properties_provided */
2848 0, /* properties_destroyed */
2849 0, /* todo_flags_start */
2850 TODO_update_ssa, /* todo_flags_finish */
2851 };
2852
2853 class pass_asan_O0 : public gimple_opt_pass
2854 {
2855 public:
2856 pass_asan_O0 (gcc::context *ctxt)
2857 : gimple_opt_pass (pass_data_asan_O0, ctxt)
2858 {}
2859
2860 /* opt_pass methods: */
2861 virtual bool gate (function *) { return !optimize && gate_asan (); }
2862 virtual unsigned int execute (function *) { return asan_instrument (); }
2863
2864 }; // class pass_asan_O0
2865
2866 } // anon namespace
2867
2868 gimple_opt_pass *
2869 make_pass_asan_O0 (gcc::context *ctxt)
2870 {
2871 return new pass_asan_O0 (ctxt);
2872 }
2873
2874 #include "gt-asan.h"