Allow the Asan shadow offset to be overridden.
gcc/asan.c
1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2014 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tree.h"
26 #include "hash-table.h"
27 #include "predict.h"
28 #include "vec.h"
29 #include "hashtab.h"
30 #include "hash-set.h"
31 #include "machmode.h"
32 #include "tm.h"
33 #include "hard-reg-set.h"
34 #include "input.h"
35 #include "function.h"
36 #include "dominance.h"
37 #include "cfg.h"
38 #include "cfganal.h"
39 #include "basic-block.h"
40 #include "tree-ssa-alias.h"
41 #include "internal-fn.h"
42 #include "gimple-expr.h"
43 #include "is-a.h"
44 #include "inchash.h"
45 #include "gimple.h"
46 #include "gimplify.h"
47 #include "gimple-iterator.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stor-layout.h"
51 #include "tree-iterator.h"
52 #include "cgraph.h"
53 #include "stringpool.h"
54 #include "tree-ssanames.h"
55 #include "tree-pass.h"
56 #include "asan.h"
57 #include "gimple-pretty-print.h"
58 #include "target.h"
59 #include "expr.h"
60 #include "optabs.h"
61 #include "output.h"
62 #include "tm_p.h"
63 #include "langhooks.h"
64 #include "alloc-pool.h"
65 #include "cfgloop.h"
66 #include "gimple-builder.h"
67 #include "ubsan.h"
68 #include "params.h"
69 #include "builtins.h"
70
71 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
72 with <2x slowdown on average.
73
74 The tool consists of two parts:
75 instrumentation module (this file) and a run-time library.
76 The instrumentation module adds a run-time check before every memory insn.
 77   For an 8- or 16-byte load accessing address X:
78 ShadowAddr = (X >> 3) + Offset
79 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
80 if (ShadowValue)
81 __asan_report_load8(X);
82 For a load of N bytes (N=1, 2 or 4) from address X:
83 ShadowAddr = (X >> 3) + Offset
84 ShadowValue = *(char*)ShadowAddr;
85 if (ShadowValue)
86 if ((X & 7) + N - 1 > ShadowValue)
87 __asan_report_loadN(X);
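      For example (illustrative values): with N == 4, an access at an
      address X with (X & 7) == 6 against a shadow byte holding 5 is
      reported, because 6 + 4 - 1 == 9 > 5; the same access with
      (X & 7) == 0 passes, because 0 + 4 - 1 == 3 <= 5 (a positive
      shadow value K means the first K bytes of that 8-byte granule
      are addressable).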
88 Stores are instrumented similarly, but using __asan_report_storeN functions.
 89   A call to __asan_init_vN () is inserted into the list of module CTORs.
90 N is the version number of the AddressSanitizer API. The changes between the
91 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
92
 93   The run-time library redefines malloc (so that red zones are inserted around
 94   the allocated memory) and free (so that reuse of freed memory is delayed),
 95   and provides the __asan_report* and __asan_init_vN functions.
96
97 Read more:
98 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
99
100 The current implementation supports detection of out-of-bounds and
101 use-after-free in the heap, on the stack and for global variables.
102
103 [Protection of stack variables]
104
105 To understand how detection of out-of-bounds and use-after-free works
 106   for stack variables, let's look at this example on x86_64 where the
107 stack grows downward:
108
109 int
110 foo ()
111 {
112 char a[23] = {0};
113 int b[2] = {0};
114
115 a[5] = 1;
116 b[1] = 2;
117
118 return a[5] + b[1];
119 }
120
121 For this function, the stack protected by asan will be organized as
122 follows, from the top of the stack to the bottom:
123
124 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
125
 126   Slot 2/ [8 bytes of red zone, which pad the space of 'a' to make
 127   the next slot 32-byte aligned; this one is called a Partial
 128   Redzone; this 32-byte alignment is an asan constraint]
129
130 Slot 3/ [24 bytes for variable 'a']
131
132 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
133
 134   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
135
136 Slot 6/ [8 bytes for variable 'b']
137
138 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
139 'LEFT RedZone']
140
141 The 32 bytes of LEFT red zone at the bottom of the stack can be
 142   decomposed as follows:
143
144 1/ The first 8 bytes contain a magical asan number that is always
145 0x41B58AB3.
146
 147   2/ The following 8 bytes contain a pointer to a string (to be
 148   parsed at run time by the asan run-time library), whose format
 149   is the following:
150
151 "<function-name> <space> <num-of-variables-on-the-stack>
152 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
153 <length-of-var-in-bytes> ){n} "
154
155 where '(...){n}' means the content inside the parenthesis occurs 'n'
156 times, with 'n' being the number of variables on the stack.
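      For the function 'foo' above, the emitted string could thus look
      like (offsets and variable order illustrative):

        "2 32 8 1 b 96 23 1 a "

      i.e. two variables: 'b' (8 bytes, name of length 1) at frame
      offset 32, then 'a' (23 bytes) at frame offset 96.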
157
 158   3/ The following 8 bytes contain the PC of the current function, which
159 will be used by the run-time library to print an error message.
160
161 4/ The following 8 bytes are reserved for internal use by the run-time.
162
163 The shadow memory for that stack layout is going to look like this:
164
 165   - content of the 4 shadow memory bytes for slot 7: 0xF1F1F1F1.
 166   The F1 byte pattern is a magic number called
 167   ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
 168   the memory for that shadow byte is part of the LEFT red zone
 169   intended to sit at the bottom of the variables on the stack.
170
 171   - content of the 4 shadow memory bytes for slots 6 and 5:
 172   0xF4F4F400. The F4 byte pattern is a magic number
 173   called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
 174   memory region for this shadow byte is a PARTIAL red zone
 175   intended to pad a variable A, so that the slot following
 176   {A,padding} is 32-byte aligned.
 177
 178   Note that the least significant byte of this shadow memory
 179   content being 00 means that the 8 bytes of its corresponding
 180   memory (which corresponds to the memory of variable 'b')
 181   are addressable.
182
 183   - content of the 4 shadow memory bytes for slot 4: 0xF2F2F2F2.
 184   The F2 byte pattern is a magic number called
 185   ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
 186   region for this shadow byte is a MIDDLE red zone intended to
 187   sit between two 32-byte aligned slots of {variable,padding}.
188
 189   - content of the 4 shadow memory bytes for slots 3 and 2:
 190   0xF4000000. This represents the concatenation of
 191   variable 'a' and the partial red zone following it, like what we
 192   had for variable 'b'. The 3 least significant bytes being 00
 193   means that the 24-byte slot of variable 'a' is addressable.
194
 195   - content of the 4 shadow memory bytes for slot 1: 0xF3F3F3F3.
 196   The F3 byte pattern is a magic number called
 197   ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
 198   region for this shadow byte is a RIGHT red zone intended to sit
 199   at the top of the variables on the stack.
200
201 Note that the real variable layout is done in expand_used_vars in
202 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
203 stack variables as well as the different red zones, emits some
 204   prologue code to populate the shadow memory so as to poison (mark as
 205   non-accessible) the regions of the red zones and mark the regions of
 206   stack variables as accessible, and emits some epilogue code to
207 un-poison (mark as accessible) the regions of red zones right before
208 the function exits.
209
210 [Protection of global variables]
211
212 The basic idea is to insert a red zone between two global variables
 213   and install a constructor function that calls the asan runtime to
 214   populate the relevant shadow memory regions at load time.
 215
 216   So the global variables are laid out so as to insert a red zone between
 217   them. The red zones are sized so that each variable starts on a
 218   32-byte boundary.
219
220 Then a constructor function is installed so that, for each global
221 variable, it calls the runtime asan library function
 222   __asan_register_globals with an instance of this type:
223
224 struct __asan_global
225 {
226 // Address of the beginning of the global variable.
227 const void *__beg;
228
229 // Initial size of the global variable.
230 uptr __size;
231
232 // Size of the global variable + size of the red zone. This
 233   // size is 32-byte aligned.
234 uptr __size_with_redzone;
235
236 // Name of the global variable.
237 const void *__name;
238
239 // Name of the module where the global variable is declared.
240 const void *__module_name;
241
242 // 1 if it has dynamic initialization, 0 otherwise.
243 uptr __has_dynamic_init;
244
245 // A pointer to struct that contains source location, could be NULL.
246 __asan_global_source_location *__location;
247 }
248
249 A destructor function that calls the runtime asan library function
 250   __asan_unregister_globals is also installed. */
251
252 static unsigned HOST_WIDE_INT asan_shadow_offset_value;
253 static bool asan_shadow_offset_computed;
254
 255 /* Set the shadow offset from the string VAL; return true on success. */
256
257 bool
258 set_asan_shadow_offset (const char *val)
259 {
260 char *endp;
261
262 errno = 0;
263 #ifdef HAVE_LONG_LONG
264 asan_shadow_offset_value = strtoull (val, &endp, 0);
265 #else
266 asan_shadow_offset_value = strtoul (val, &endp, 0);
267 #endif
268 if (!(*val != '\0' && *endp == '\0' && errno == 0))
269 return false;
270
271 asan_shadow_offset_computed = true;
272
273 return true;
274 }
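/* For example, a handler for -fasan-shadow-offset= could call this as
   follows (a minimal sketch; the real option plumbing lives in the
   option-processing code, not in this file):

     if (!set_asan_shadow_offset ("0x7fff8000"))
       error ("invalid shadow offset");

   after which asan_shadow_offset () below returns 0x7fff8000 instead of
   asking the targetm.asan_shadow_offset hook.  */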
275
 276 /* Returns the Asan shadow offset. */
277
278 static unsigned HOST_WIDE_INT
279 asan_shadow_offset ()
280 {
281 if (!asan_shadow_offset_computed)
282 {
283 asan_shadow_offset_computed = true;
284 asan_shadow_offset_value = targetm.asan_shadow_offset ();
285 }
286 return asan_shadow_offset_value;
287 }
288
289 alias_set_type asan_shadow_set = -1;
290
 291 /* Pointer types to 1-byte and 2-byte integers in shadow memory. A separate
292 alias set is used for all shadow memory accesses. */
293 static GTY(()) tree shadow_ptr_types[2];
294
295 /* Decl for __asan_option_detect_stack_use_after_return. */
296 static GTY(()) tree asan_detect_stack_use_after_return;
297
298 /* Various flags for Asan builtins. */
299 enum asan_check_flags
300 {
301 ASAN_CHECK_STORE = 1 << 0,
302 ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
303 ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
304 ASAN_CHECK_START_INSTRUMENTED = 1 << 3,
305 ASAN_CHECK_END_INSTRUMENTED = 1 << 4,
306 ASAN_CHECK_LAST
307 };
308
309 /* Hashtable support for memory references used by gimple
310 statements. */
311
312 /* This type represents a reference to a memory region. */
313 struct asan_mem_ref
314 {
315 /* The expression of the beginning of the memory region. */
316 tree start;
317
318 /* The size of the access. */
319 HOST_WIDE_INT access_size;
320 };
321
322 static alloc_pool asan_mem_ref_alloc_pool;
323
324 /* This creates the alloc pool used to store the instances of
325 asan_mem_ref that are stored in the hash table asan_mem_ref_ht. */
326
327 static alloc_pool
328 asan_mem_ref_get_alloc_pool ()
329 {
330 if (asan_mem_ref_alloc_pool == NULL)
331 asan_mem_ref_alloc_pool = create_alloc_pool ("asan_mem_ref",
332 sizeof (asan_mem_ref),
333 10);
334 return asan_mem_ref_alloc_pool;
336 }
337
338 /* Initializes an instance of asan_mem_ref. */
339
340 static void
341 asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
342 {
343 ref->start = start;
344 ref->access_size = access_size;
345 }
346
347 /* Allocates memory for an instance of asan_mem_ref into the memory
 348   pool returned by asan_mem_ref_get_alloc_pool and initializes it.
349 START is the address of (or the expression pointing to) the
350 beginning of memory reference. ACCESS_SIZE is the size of the
351 access to the referenced memory. */
352
353 static asan_mem_ref*
354 asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
355 {
356 asan_mem_ref *ref =
357 (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());
358
359 asan_mem_ref_init (ref, start, access_size);
360 return ref;
361 }
362
363 /* This builds and returns a pointer to the end of the memory region
 364   that starts at START and has length LEN. */
365
366 tree
367 asan_mem_ref_get_end (tree start, tree len)
368 {
369 if (len == NULL_TREE || integer_zerop (len))
370 return start;
371
372 if (!ptrofftype_p (len))
373 len = convert_to_ptrofftype (len);
374
375 return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
376 }
377
378 /* Return a tree expression that represents the end of the referenced
379 memory region. Beware that this function can actually build a new
380 tree expression. */
381
382 tree
383 asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
384 {
385 return asan_mem_ref_get_end (ref->start, len);
386 }
387
388 struct asan_mem_ref_hasher
389 : typed_noop_remove <asan_mem_ref>
390 {
391 typedef asan_mem_ref value_type;
392 typedef asan_mem_ref compare_type;
393
394 static inline hashval_t hash (const value_type *);
395 static inline bool equal (const value_type *, const compare_type *);
396 };
397
398 /* Hash a memory reference. */
399
400 inline hashval_t
401 asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
402 {
403 inchash::hash hstate;
404 inchash::add_expr (mem_ref->start, hstate);
405 hstate.add_wide_int (mem_ref->access_size);
406 return hstate.end ();
407 }
408
409 /* Compare two memory references. We accept the length of either
 410   memory reference to be NULL_TREE. */
411
412 inline bool
413 asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
414 const asan_mem_ref *m2)
415 {
416 return (m1->access_size == m2->access_size
417 && operand_equal_p (m1->start, m2->start, 0));
418 }
419
420 static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
421
422 /* Returns a reference to the hash table containing memory references.
423 This function ensures that the hash table is created. Note that
424 this hash table is updated by the function
425 update_mem_ref_hash_table. */
426
427 static hash_table<asan_mem_ref_hasher> *
428 get_mem_ref_hash_table ()
429 {
430 if (!asan_mem_ref_ht)
431 asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
432
433 return asan_mem_ref_ht;
434 }
435
436 /* Clear all entries from the memory references hash table. */
437
438 static void
439 empty_mem_ref_hash_table ()
440 {
441 if (asan_mem_ref_ht)
442 asan_mem_ref_ht->empty ();
443 }
444
445 /* Free the memory references hash table. */
446
447 static void
448 free_mem_ref_resources ()
449 {
450 delete asan_mem_ref_ht;
451 asan_mem_ref_ht = NULL;
452
453 if (asan_mem_ref_alloc_pool)
454 {
455 free_alloc_pool (asan_mem_ref_alloc_pool);
456 asan_mem_ref_alloc_pool = NULL;
457 }
458 }
459
460 /* Return true iff the memory reference REF has been instrumented. */
461
462 static bool
463 has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
464 {
465 asan_mem_ref r;
466 asan_mem_ref_init (&r, ref, access_size);
467
468 return (get_mem_ref_hash_table ()->find (&r) != NULL);
469 }
470
471 /* Return true iff the memory reference REF has been instrumented. */
472
473 static bool
474 has_mem_ref_been_instrumented (const asan_mem_ref *ref)
475 {
476 return has_mem_ref_been_instrumented (ref->start, ref->access_size);
477 }
478
479 /* Return true iff access to memory region starting at REF and of
480 length LEN has been instrumented. */
481
482 static bool
483 has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
484 {
485 /* First let's see if the address of the beginning of REF has been
486 instrumented. */
487 if (!has_mem_ref_been_instrumented (ref))
488 return false;
489
 490   if (len != NULL_TREE)
491 {
492 /* Let's see if the end of the region has been instrumented. */
493 if (!has_mem_ref_been_instrumented (asan_mem_ref_get_end (ref, len),
494 ref->access_size))
495 return false;
496 }
497 return true;
498 }
499
500 /* Set REF to the memory reference present in a gimple assignment
501 ASSIGNMENT. Return true upon successful completion, false
502 otherwise. */
503
504 static bool
505 get_mem_ref_of_assignment (const gimple assignment,
506 asan_mem_ref *ref,
507 bool *ref_is_store)
508 {
509 gcc_assert (gimple_assign_single_p (assignment));
510
511 if (gimple_store_p (assignment)
512 && !gimple_clobber_p (assignment))
513 {
514 ref->start = gimple_assign_lhs (assignment);
515 *ref_is_store = true;
516 }
517 else if (gimple_assign_load_p (assignment))
518 {
519 ref->start = gimple_assign_rhs1 (assignment);
520 *ref_is_store = false;
521 }
522 else
523 return false;
524
525 ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
526 return true;
527 }
528
 529 /* Extract into SRC0/SRC1/DST the memory references contained in a gimple
 530    statement representing a memory-access builtin call; return true on success. */
531
532 static bool
533 get_mem_refs_of_builtin_call (const gimple call,
534 asan_mem_ref *src0,
535 tree *src0_len,
536 bool *src0_is_store,
537 asan_mem_ref *src1,
538 tree *src1_len,
539 bool *src1_is_store,
540 asan_mem_ref *dst,
541 tree *dst_len,
542 bool *dst_is_store,
543 bool *dest_is_deref)
544 {
545 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
546
547 tree callee = gimple_call_fndecl (call);
548 tree source0 = NULL_TREE, source1 = NULL_TREE,
549 dest = NULL_TREE, len = NULL_TREE;
550 bool is_store = true, got_reference_p = false;
551 HOST_WIDE_INT access_size = 1;
552
553 switch (DECL_FUNCTION_CODE (callee))
554 {
555 /* (s, s, n) style memops. */
556 case BUILT_IN_BCMP:
557 case BUILT_IN_MEMCMP:
558 source0 = gimple_call_arg (call, 0);
559 source1 = gimple_call_arg (call, 1);
560 len = gimple_call_arg (call, 2);
561 break;
562
563 /* (src, dest, n) style memops. */
564 case BUILT_IN_BCOPY:
565 source0 = gimple_call_arg (call, 0);
566 dest = gimple_call_arg (call, 1);
567 len = gimple_call_arg (call, 2);
568 break;
569
570 /* (dest, src, n) style memops. */
571 case BUILT_IN_MEMCPY:
572 case BUILT_IN_MEMCPY_CHK:
573 case BUILT_IN_MEMMOVE:
574 case BUILT_IN_MEMMOVE_CHK:
575 case BUILT_IN_MEMPCPY:
576 case BUILT_IN_MEMPCPY_CHK:
577 dest = gimple_call_arg (call, 0);
578 source0 = gimple_call_arg (call, 1);
579 len = gimple_call_arg (call, 2);
580 break;
581
582 /* (dest, n) style memops. */
583 case BUILT_IN_BZERO:
584 dest = gimple_call_arg (call, 0);
585 len = gimple_call_arg (call, 1);
586 break;
587
 588     /* (dest, x, n) style memops.  */
589 case BUILT_IN_MEMSET:
590 case BUILT_IN_MEMSET_CHK:
591 dest = gimple_call_arg (call, 0);
592 len = gimple_call_arg (call, 2);
593 break;
594
595 case BUILT_IN_STRLEN:
596 source0 = gimple_call_arg (call, 0);
597 len = gimple_call_lhs (call);
 598       break;
599
600 /* And now the __atomic* and __sync builtins.
 601        These are handled differently from the classical memory
602 access builtins above. */
603
604 case BUILT_IN_ATOMIC_LOAD_1:
605 case BUILT_IN_ATOMIC_LOAD_2:
606 case BUILT_IN_ATOMIC_LOAD_4:
607 case BUILT_IN_ATOMIC_LOAD_8:
608 case BUILT_IN_ATOMIC_LOAD_16:
609 is_store = false;
610 /* fall through. */
611
612 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
613 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
614 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
615 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
616 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
617
618 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
619 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
620 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
621 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
622 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
623
624 case BUILT_IN_SYNC_FETCH_AND_OR_1:
625 case BUILT_IN_SYNC_FETCH_AND_OR_2:
626 case BUILT_IN_SYNC_FETCH_AND_OR_4:
627 case BUILT_IN_SYNC_FETCH_AND_OR_8:
628 case BUILT_IN_SYNC_FETCH_AND_OR_16:
629
630 case BUILT_IN_SYNC_FETCH_AND_AND_1:
631 case BUILT_IN_SYNC_FETCH_AND_AND_2:
632 case BUILT_IN_SYNC_FETCH_AND_AND_4:
633 case BUILT_IN_SYNC_FETCH_AND_AND_8:
634 case BUILT_IN_SYNC_FETCH_AND_AND_16:
635
636 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
637 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
638 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
639 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
640 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
641
642 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
643 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
644 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
645 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
646
647 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
648 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
649 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
650 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
651 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
652
653 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
654 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
655 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
656 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
657 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
658
659 case BUILT_IN_SYNC_OR_AND_FETCH_1:
660 case BUILT_IN_SYNC_OR_AND_FETCH_2:
661 case BUILT_IN_SYNC_OR_AND_FETCH_4:
662 case BUILT_IN_SYNC_OR_AND_FETCH_8:
663 case BUILT_IN_SYNC_OR_AND_FETCH_16:
664
665 case BUILT_IN_SYNC_AND_AND_FETCH_1:
666 case BUILT_IN_SYNC_AND_AND_FETCH_2:
667 case BUILT_IN_SYNC_AND_AND_FETCH_4:
668 case BUILT_IN_SYNC_AND_AND_FETCH_8:
669 case BUILT_IN_SYNC_AND_AND_FETCH_16:
670
671 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
672 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
673 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
674 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
675 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
676
677 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
678 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
679 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
680 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
681
682 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
683 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
684 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
685 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
686 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
687
688 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
689 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
690 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
691 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
692 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
693
694 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
695 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
696 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
697 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
698 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
699
700 case BUILT_IN_SYNC_LOCK_RELEASE_1:
701 case BUILT_IN_SYNC_LOCK_RELEASE_2:
702 case BUILT_IN_SYNC_LOCK_RELEASE_4:
703 case BUILT_IN_SYNC_LOCK_RELEASE_8:
704 case BUILT_IN_SYNC_LOCK_RELEASE_16:
705
706 case BUILT_IN_ATOMIC_EXCHANGE_1:
707 case BUILT_IN_ATOMIC_EXCHANGE_2:
708 case BUILT_IN_ATOMIC_EXCHANGE_4:
709 case BUILT_IN_ATOMIC_EXCHANGE_8:
710 case BUILT_IN_ATOMIC_EXCHANGE_16:
711
712 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
713 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
714 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
715 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
716 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
717
718 case BUILT_IN_ATOMIC_STORE_1:
719 case BUILT_IN_ATOMIC_STORE_2:
720 case BUILT_IN_ATOMIC_STORE_4:
721 case BUILT_IN_ATOMIC_STORE_8:
722 case BUILT_IN_ATOMIC_STORE_16:
723
724 case BUILT_IN_ATOMIC_ADD_FETCH_1:
725 case BUILT_IN_ATOMIC_ADD_FETCH_2:
726 case BUILT_IN_ATOMIC_ADD_FETCH_4:
727 case BUILT_IN_ATOMIC_ADD_FETCH_8:
728 case BUILT_IN_ATOMIC_ADD_FETCH_16:
729
730 case BUILT_IN_ATOMIC_SUB_FETCH_1:
731 case BUILT_IN_ATOMIC_SUB_FETCH_2:
732 case BUILT_IN_ATOMIC_SUB_FETCH_4:
733 case BUILT_IN_ATOMIC_SUB_FETCH_8:
734 case BUILT_IN_ATOMIC_SUB_FETCH_16:
735
736 case BUILT_IN_ATOMIC_AND_FETCH_1:
737 case BUILT_IN_ATOMIC_AND_FETCH_2:
738 case BUILT_IN_ATOMIC_AND_FETCH_4:
739 case BUILT_IN_ATOMIC_AND_FETCH_8:
740 case BUILT_IN_ATOMIC_AND_FETCH_16:
741
742 case BUILT_IN_ATOMIC_NAND_FETCH_1:
743 case BUILT_IN_ATOMIC_NAND_FETCH_2:
744 case BUILT_IN_ATOMIC_NAND_FETCH_4:
745 case BUILT_IN_ATOMIC_NAND_FETCH_8:
746 case BUILT_IN_ATOMIC_NAND_FETCH_16:
747
748 case BUILT_IN_ATOMIC_XOR_FETCH_1:
749 case BUILT_IN_ATOMIC_XOR_FETCH_2:
750 case BUILT_IN_ATOMIC_XOR_FETCH_4:
751 case BUILT_IN_ATOMIC_XOR_FETCH_8:
752 case BUILT_IN_ATOMIC_XOR_FETCH_16:
753
754 case BUILT_IN_ATOMIC_OR_FETCH_1:
755 case BUILT_IN_ATOMIC_OR_FETCH_2:
756 case BUILT_IN_ATOMIC_OR_FETCH_4:
757 case BUILT_IN_ATOMIC_OR_FETCH_8:
758 case BUILT_IN_ATOMIC_OR_FETCH_16:
759
760 case BUILT_IN_ATOMIC_FETCH_ADD_1:
761 case BUILT_IN_ATOMIC_FETCH_ADD_2:
762 case BUILT_IN_ATOMIC_FETCH_ADD_4:
763 case BUILT_IN_ATOMIC_FETCH_ADD_8:
764 case BUILT_IN_ATOMIC_FETCH_ADD_16:
765
766 case BUILT_IN_ATOMIC_FETCH_SUB_1:
767 case BUILT_IN_ATOMIC_FETCH_SUB_2:
768 case BUILT_IN_ATOMIC_FETCH_SUB_4:
769 case BUILT_IN_ATOMIC_FETCH_SUB_8:
770 case BUILT_IN_ATOMIC_FETCH_SUB_16:
771
772 case BUILT_IN_ATOMIC_FETCH_AND_1:
773 case BUILT_IN_ATOMIC_FETCH_AND_2:
774 case BUILT_IN_ATOMIC_FETCH_AND_4:
775 case BUILT_IN_ATOMIC_FETCH_AND_8:
776 case BUILT_IN_ATOMIC_FETCH_AND_16:
777
778 case BUILT_IN_ATOMIC_FETCH_NAND_1:
779 case BUILT_IN_ATOMIC_FETCH_NAND_2:
780 case BUILT_IN_ATOMIC_FETCH_NAND_4:
781 case BUILT_IN_ATOMIC_FETCH_NAND_8:
782 case BUILT_IN_ATOMIC_FETCH_NAND_16:
783
784 case BUILT_IN_ATOMIC_FETCH_XOR_1:
785 case BUILT_IN_ATOMIC_FETCH_XOR_2:
786 case BUILT_IN_ATOMIC_FETCH_XOR_4:
787 case BUILT_IN_ATOMIC_FETCH_XOR_8:
788 case BUILT_IN_ATOMIC_FETCH_XOR_16:
789
790 case BUILT_IN_ATOMIC_FETCH_OR_1:
791 case BUILT_IN_ATOMIC_FETCH_OR_2:
792 case BUILT_IN_ATOMIC_FETCH_OR_4:
793 case BUILT_IN_ATOMIC_FETCH_OR_8:
794 case BUILT_IN_ATOMIC_FETCH_OR_16:
795 {
796 dest = gimple_call_arg (call, 0);
797 /* DEST represents the address of a memory location.
 798 	 instrument_derefs wants the memory location, so let's
799 dereference the address DEST before handing it to
800 instrument_derefs. */
801 if (TREE_CODE (dest) == ADDR_EXPR)
802 dest = TREE_OPERAND (dest, 0);
803 else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
804 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
805 dest, build_int_cst (TREE_TYPE (dest), 0));
806 else
807 gcc_unreachable ();
808
809 access_size = int_size_in_bytes (TREE_TYPE (dest));
810 }
811
812 default:
 813       /* The other memory-access builtins are not instrumented in this
814 function because they either don't have any length parameter,
815 or their length parameter is just a limit. */
816 break;
817 }
818
819 if (len != NULL_TREE)
820 {
821 if (source0 != NULL_TREE)
822 {
823 src0->start = source0;
824 src0->access_size = access_size;
825 *src0_len = len;
826 *src0_is_store = false;
827 }
828
829 if (source1 != NULL_TREE)
830 {
831 src1->start = source1;
832 src1->access_size = access_size;
833 *src1_len = len;
834 *src1_is_store = false;
835 }
836
837 if (dest != NULL_TREE)
838 {
839 dst->start = dest;
840 dst->access_size = access_size;
841 *dst_len = len;
842 *dst_is_store = true;
843 }
844
845 got_reference_p = true;
846 }
847 else if (dest)
848 {
849 dst->start = dest;
850 dst->access_size = access_size;
851 *dst_len = NULL_TREE;
852 *dst_is_store = is_store;
853 *dest_is_deref = true;
854 got_reference_p = true;
855 }
856
857 return got_reference_p;
858 }
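/* As an illustration, for a call

     memcpy (d, s, n);

   the switch above sets *DST to {d, 1} and *SRC0 to {s, 1}, stores N in
   both *DST_LEN and *SRC0_LEN, sets *DST_IS_STORE to true and
   *SRC0_IS_STORE to false, and the function returns true.  */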
859
860 /* Return true iff a given gimple statement has been instrumented.
861 Note that the statement is "defined" by the memory references it
862 contains. */
863
864 static bool
865 has_stmt_been_instrumented_p (gimple stmt)
866 {
867 if (gimple_assign_single_p (stmt))
868 {
869 bool r_is_store;
870 asan_mem_ref r;
871 asan_mem_ref_init (&r, NULL, 1);
872
873 if (get_mem_ref_of_assignment (stmt, &r, &r_is_store))
874 return has_mem_ref_been_instrumented (&r);
875 }
876 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
877 {
878 asan_mem_ref src0, src1, dest;
879 asan_mem_ref_init (&src0, NULL, 1);
880 asan_mem_ref_init (&src1, NULL, 1);
881 asan_mem_ref_init (&dest, NULL, 1);
882
883 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
884 bool src0_is_store = false, src1_is_store = false,
885 dest_is_store = false, dest_is_deref = false;
886 if (get_mem_refs_of_builtin_call (stmt,
887 &src0, &src0_len, &src0_is_store,
888 &src1, &src1_len, &src1_is_store,
889 &dest, &dest_len, &dest_is_store,
890 &dest_is_deref))
891 {
892 if (src0.start != NULL_TREE
893 && !has_mem_ref_been_instrumented (&src0, src0_len))
894 return false;
895
896 if (src1.start != NULL_TREE
897 && !has_mem_ref_been_instrumented (&src1, src1_len))
898 return false;
899
900 if (dest.start != NULL_TREE
901 && !has_mem_ref_been_instrumented (&dest, dest_len))
902 return false;
903
904 return true;
905 }
906 }
907 return false;
908 }
909
910 /* Insert a memory reference into the hash table. */
911
912 static void
913 update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
914 {
915 hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
916
917 asan_mem_ref r;
918 asan_mem_ref_init (&r, ref, access_size);
919
920 asan_mem_ref **slot = ht->find_slot (&r, INSERT);
921 if (*slot == NULL)
922 *slot = asan_mem_ref_new (ref, access_size);
923 }
924
925 /* Initialize shadow_ptr_types array. */
926
927 static void
928 asan_init_shadow_ptr_types (void)
929 {
930 asan_shadow_set = new_alias_set ();
931 shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
932 TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
933 shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
934 shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
935 TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
936 shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
937 initialize_sanitizer_builtins ();
938 }
939
 940 /* Create an ADDR_EXPR of a STRING_CST containing the text of the pretty printer PP. */
941
942 static tree
943 asan_pp_string (pretty_printer *pp)
944 {
945 const char *buf = pp_formatted_text (pp);
946 size_t len = strlen (buf);
947 tree ret = build_string (len + 1, buf);
948 TREE_TYPE (ret)
949 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
950 build_index_type (size_int (len)));
951 TREE_READONLY (ret) = 1;
952 TREE_STATIC (ret) = 1;
953 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
954 }
955
 956 /* Return a CONST_INT representing 4 consecutive shadow memory bytes. */
957
958 static rtx
959 asan_shadow_cst (unsigned char shadow_bytes[4])
960 {
961 int i;
962 unsigned HOST_WIDE_INT val = 0;
963 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
964 for (i = 0; i < 4; i++)
965 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
966 << (BITS_PER_UNIT * i);
967 return gen_int_mode (val, SImode);
968 }
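/* For instance, on a little-endian target shadow_bytes
   {0x00, 0x00, 0x00, 0xf4} yields the SImode constant 0xf4000000; byte
   I of the constant is shadow_bytes[I], so the partial-redzone magic
   byte covers the highest-addressed 8 bytes of the 32-byte region.  */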
969
 970 /* Clear LEN bytes of shadow memory at SHADOW_MEM. We can't emit a library
 971    call here, though. */
972
973 static void
974 asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
975 {
976 rtx_insn *insn, *insns, *jump;
977 rtx_code_label *top_label;
978 rtx end, addr, tmp;
979
980 start_sequence ();
981 clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
982 insns = get_insns ();
983 end_sequence ();
984 for (insn = insns; insn; insn = NEXT_INSN (insn))
985 if (CALL_P (insn))
986 break;
987 if (insn == NULL_RTX)
988 {
989 emit_insn (insns);
990 return;
991 }
992
993 gcc_assert ((len & 3) == 0);
994 top_label = gen_label_rtx ();
995 addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
996 shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
997 end = force_reg (Pmode, plus_constant (Pmode, addr, len));
998 emit_label (top_label);
999
1000 emit_move_insn (shadow_mem, const0_rtx);
1001 tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
1002 true, OPTAB_LIB_WIDEN);
1003 if (tmp != addr)
1004 emit_move_insn (addr, tmp);
1005 emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
1006 jump = get_last_insn ();
1007 gcc_assert (JUMP_P (jump));
1008 add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
1009 }
1010
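/* Emit the "LASANPC" label at the start of the current function; its
   address is stored into the stack frame description emitted by
   asan_emit_stack_protection below and is reported by the run-time
   library as the PC of the function.  */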
1011 void
1012 asan_function_start (void)
1013 {
1014 section *fnsec = function_section (current_function_decl);
1015 switch_to_section (fnsec);
1016 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
1017 current_function_funcdef_no);
1018 }
1019
1020 /* Insert code to protect stack vars. The prologue sequence should be emitted
 1021    directly, the epilogue sequence returned. BASE is the register holding the
 1022    stack base, relative to which the OFFSETS array offsets are expressed; the
 1023    OFFSETS array contains pairs of offsets in reverse order, always the end offset
1024 of some gap that needs protection followed by starting offset,
1025 and DECLS is an array of representative decls for each var partition.
 1026    LENGTH is the length of the OFFSETS array; the DECLS array is LENGTH / 2 - 1
1027 elements long (OFFSETS include gap before the first variable as well
1028 as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
1029 register which stack vars DECL_RTLs are based on. Either BASE should be
1030 assigned to PBASE, when not doing use after return protection, or
1031 corresponding address based on __asan_stack_malloc* return value. */
1032
1033 rtx_insn *
1034 asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
1035 HOST_WIDE_INT *offsets, tree *decls, int length)
1036 {
1037 rtx shadow_base, shadow_mem, ret, mem, orig_base;
1038 rtx_code_label *lab;
1039 rtx_insn *insns;
1040 char buf[30];
1041 unsigned char shadow_bytes[4];
1042 HOST_WIDE_INT base_offset = offsets[length - 1];
1043 HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
1044 HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
1045 HOST_WIDE_INT last_offset, last_size;
1046 int l;
1047 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
1048 tree str_cst, decl, id;
1049 int use_after_return_class = -1;
1050
1051 if (shadow_ptr_types[0] == NULL_TREE)
1052 asan_init_shadow_ptr_types ();
1053
1054 /* First of all, prepare the description string. */
1055 pretty_printer asan_pp;
1056
1057 pp_decimal_int (&asan_pp, length / 2 - 1);
1058 pp_space (&asan_pp);
1059 for (l = length - 2; l; l -= 2)
1060 {
1061 tree decl = decls[l / 2 - 1];
1062 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
1063 pp_space (&asan_pp);
1064 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
1065 pp_space (&asan_pp);
1066 if (DECL_P (decl) && DECL_NAME (decl))
1067 {
1068 pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
1069 pp_space (&asan_pp);
1070 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
1071 }
1072 else
1073 pp_string (&asan_pp, "9 <unknown>");
1074 pp_space (&asan_pp);
1075 }
1076 str_cst = asan_pp_string (&asan_pp);
1077
1078 /* Emit the prologue sequence. */
1079 if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
1080 && ASAN_USE_AFTER_RETURN)
1081 {
1082 use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
1083 /* __asan_stack_malloc_N guarantees alignment
1084 N < 6 ? (64 << N) : 4096 bytes. */
1085 if (alignb > (use_after_return_class < 6
1086 ? (64U << use_after_return_class) : 4096U))
1087 use_after_return_class = -1;
1088 else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
1089 base_align_bias = ((asan_frame_size + alignb - 1)
1090 & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
1091 }
1092 /* Align base if target is STRICT_ALIGNMENT. */
1093 if (STRICT_ALIGNMENT)
1094 base = expand_binop (Pmode, and_optab, base,
1095 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
1096 << ASAN_SHADOW_SHIFT)
1097 / BITS_PER_UNIT), Pmode), NULL_RTX,
1098 1, OPTAB_DIRECT);
1099
1100 if (use_after_return_class == -1 && pbase)
1101 emit_move_insn (pbase, base);
1102
1103 base = expand_binop (Pmode, add_optab, base,
1104 gen_int_mode (base_offset - base_align_bias, Pmode),
1105 NULL_RTX, 1, OPTAB_DIRECT);
1106 orig_base = NULL_RTX;
1107 if (use_after_return_class != -1)
1108 {
1109 if (asan_detect_stack_use_after_return == NULL_TREE)
1110 {
1111 id = get_identifier ("__asan_option_detect_stack_use_after_return");
1112 decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1113 integer_type_node);
1114 SET_DECL_ASSEMBLER_NAME (decl, id);
1115 TREE_ADDRESSABLE (decl) = 1;
1116 DECL_ARTIFICIAL (decl) = 1;
1117 DECL_IGNORED_P (decl) = 1;
1118 DECL_EXTERNAL (decl) = 1;
1119 TREE_STATIC (decl) = 1;
1120 TREE_PUBLIC (decl) = 1;
1121 TREE_USED (decl) = 1;
1122 asan_detect_stack_use_after_return = decl;
1123 }
1124 orig_base = gen_reg_rtx (Pmode);
1125 emit_move_insn (orig_base, base);
1126 ret = expand_normal (asan_detect_stack_use_after_return);
1127 lab = gen_label_rtx ();
1128 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1129 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1130 VOIDmode, 0, lab, very_likely);
1131 snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1132 use_after_return_class);
1133 ret = init_one_libfunc (buf);
1134 rtx addr = convert_memory_address (ptr_mode, base);
1135 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
1136 GEN_INT (asan_frame_size
1137 + base_align_bias),
1138 TYPE_MODE (pointer_sized_int_node),
1139 addr, ptr_mode);
1140 ret = convert_memory_address (Pmode, ret);
1141 emit_move_insn (base, ret);
1142 emit_label (lab);
1143 emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1144 gen_int_mode (base_align_bias
1145 - base_offset, Pmode),
1146 NULL_RTX, 1, OPTAB_DIRECT));
1147 }
1148 mem = gen_rtx_MEM (ptr_mode, base);
1149 mem = adjust_address (mem, VOIDmode, base_align_bias);
1150 emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
1151 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1152 emit_move_insn (mem, expand_normal (str_cst));
1153 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1154 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1155 id = get_identifier (buf);
1156 decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1157 VAR_DECL, id, char_type_node);
1158 SET_DECL_ASSEMBLER_NAME (decl, id);
1159 TREE_ADDRESSABLE (decl) = 1;
1160 TREE_READONLY (decl) = 1;
1161 DECL_ARTIFICIAL (decl) = 1;
1162 DECL_IGNORED_P (decl) = 1;
1163 TREE_STATIC (decl) = 1;
1164 TREE_PUBLIC (decl) = 0;
1165 TREE_USED (decl) = 1;
1166 DECL_INITIAL (decl) = decl;
1167 TREE_ASM_WRITTEN (decl) = 1;
1168 TREE_ASM_WRITTEN (id) = 1;
1169 emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1170 shadow_base = expand_binop (Pmode, lshr_optab, base,
1171 GEN_INT (ASAN_SHADOW_SHIFT),
1172 NULL_RTX, 1, OPTAB_DIRECT);
1173 shadow_base
1174 = plus_constant (Pmode, shadow_base,
1175 asan_shadow_offset ()
1176 + (base_align_bias >> ASAN_SHADOW_SHIFT));
1177 gcc_assert (asan_shadow_set != -1
1178 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1179 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1180 set_mem_alias_set (shadow_mem, asan_shadow_set);
1181 if (STRICT_ALIGNMENT)
1182 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1183 prev_offset = base_offset;
1184 for (l = length; l; l -= 2)
1185 {
1186 if (l == 2)
1187 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1188 offset = offsets[l - 1];
1189 if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
1190 {
1191 int i;
1192 HOST_WIDE_INT aoff
1193 = base_offset + ((offset - base_offset)
1194 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1195 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1196 (aoff - prev_offset)
1197 >> ASAN_SHADOW_SHIFT);
1198 prev_offset = aoff;
1199 for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
1200 if (aoff < offset)
1201 {
1202 if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
1203 shadow_bytes[i] = 0;
1204 else
1205 shadow_bytes[i] = offset - aoff;
1206 }
1207 else
1208 shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
1209 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1210 offset = aoff;
1211 }
1212 while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
1213 {
1214 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1215 (offset - prev_offset)
1216 >> ASAN_SHADOW_SHIFT);
1217 prev_offset = offset;
1218 memset (shadow_bytes, cur_shadow_byte, 4);
1219 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1220 offset += ASAN_RED_ZONE_SIZE;
1221 }
1222 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1223 }
1224 do_pending_stack_adjust ();
1225
1226 /* Construct epilogue sequence. */
1227 start_sequence ();
1228
1229 lab = NULL;
1230 if (use_after_return_class != -1)
1231 {
1232 rtx_code_label *lab2 = gen_label_rtx ();
1233 char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
1234 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1235 emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
1236 VOIDmode, 0, lab2, very_likely);
1237 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1238 set_mem_alias_set (shadow_mem, asan_shadow_set);
1239 mem = gen_rtx_MEM (ptr_mode, base);
1240 mem = adjust_address (mem, VOIDmode, base_align_bias);
1241 emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
1242 unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
1243 if (use_after_return_class < 5
1244 && can_store_by_pieces (sz, builtin_memset_read_str, &c,
1245 BITS_PER_UNIT, true))
1246 store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
1247 BITS_PER_UNIT, true, 0);
1248 else if (use_after_return_class >= 5
1249 || !set_storage_via_setmem (shadow_mem,
1250 GEN_INT (sz),
1251 gen_int_mode (c, QImode),
1252 BITS_PER_UNIT, BITS_PER_UNIT,
1253 -1, sz, sz, sz))
1254 {
1255 snprintf (buf, sizeof buf, "__asan_stack_free_%d",
1256 use_after_return_class);
1257 ret = init_one_libfunc (buf);
1258 rtx addr = convert_memory_address (ptr_mode, base);
1259 rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
1260 emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
1261 GEN_INT (asan_frame_size + base_align_bias),
1262 TYPE_MODE (pointer_sized_int_node),
1263 orig_addr, ptr_mode);
1264 }
1265 lab = gen_label_rtx ();
1266 emit_jump (lab);
1267 emit_label (lab2);
1268 }
1269
1270 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1271 set_mem_alias_set (shadow_mem, asan_shadow_set);
1272
1273 if (STRICT_ALIGNMENT)
1274 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1275
1276 prev_offset = base_offset;
1277 last_offset = base_offset;
1278 last_size = 0;
1279 for (l = length; l; l -= 2)
1280 {
1281 offset = base_offset + ((offsets[l - 1] - base_offset)
1282 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1283 if (last_offset + last_size != offset)
1284 {
1285 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1286 (last_offset - prev_offset)
1287 >> ASAN_SHADOW_SHIFT);
1288 prev_offset = last_offset;
1289 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1290 last_offset = offset;
1291 last_size = 0;
1292 }
1293 last_size += base_offset + ((offsets[l - 2] - base_offset)
1294 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1295 - offset;
1296 }
1297 if (last_size)
1298 {
1299 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1300 (last_offset - prev_offset)
1301 >> ASAN_SHADOW_SHIFT);
1302 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1303 }
1304
1305 do_pending_stack_adjust ();
1306 if (lab)
1307 emit_label (lab);
1308
1309 insns = get_insns ();
1310 end_sequence ();
1311 return insns;
1312 }
1313
 1314 /* Return true if DECL, a global var, might be overridden and therefore
 1315    needs a local alias. */
1316
1317 static bool
1318 asan_needs_local_alias (tree decl)
1319 {
1320 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1321 }
1322
1323 /* Return true if DECL is a VAR_DECL that should be protected
1324 by Address Sanitizer, by appending a red zone with protected
1325 shadow memory after it and aligning it to at least
1326 ASAN_RED_ZONE_SIZE bytes. */
1327
1328 bool
1329 asan_protect_global (tree decl)
1330 {
1331 if (!ASAN_GLOBALS)
1332 return false;
1333
1334 rtx rtl, symbol;
1335
1336 if (TREE_CODE (decl) == STRING_CST)
1337 {
1338 /* Instrument all STRING_CSTs except those created
1339 by asan_pp_string here. */
1340 if (shadow_ptr_types[0] != NULL_TREE
1341 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1342 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1343 return false;
1344 return true;
1345 }
1346 if (TREE_CODE (decl) != VAR_DECL
1347 /* TLS vars aren't statically protectable. */
1348 || DECL_THREAD_LOCAL_P (decl)
1349 /* Externs will be protected elsewhere. */
1350 || DECL_EXTERNAL (decl)
1351 || !DECL_RTL_SET_P (decl)
1352 /* Comdat vars pose an ABI problem, we can't know if
1353 the var that is selected by the linker will have
1354 padding or not. */
1355 || DECL_ONE_ONLY (decl)
1356 /* Similarly for common vars. People can use -fno-common. */
1357 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
 1358       /* Don't protect vars placed in a user section: vars placed
 1359 	 into a user section from multiple TUs are often assumed
 1360 	 to form an array of such vars, and putting padding in there
 1361 	 breaks this assumption. */
1362 || (DECL_SECTION_NAME (decl) != NULL
1363 && !symtab_node::get (decl)->implicit_section)
1364 || DECL_SIZE (decl) == 0
1365 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1366 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1367 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
1368 || TREE_TYPE (decl) == ubsan_get_source_location_type ())
1369 return false;
1370
1371 rtl = DECL_RTL (decl);
1372 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1373 return false;
1374 symbol = XEXP (rtl, 0);
1375
1376 if (CONSTANT_POOL_ADDRESS_P (symbol)
1377 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1378 return false;
1379
1380 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1381 return false;
1382
1383 #ifndef ASM_OUTPUT_DEF
1384 if (asan_needs_local_alias (decl))
1385 return false;
1386 #endif
1387
1388 return true;
1389 }
1390
1391 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1392 IS_STORE is either 1 (for a store) or 0 (for a load). */
1393
1394 static tree
1395 report_error_func (bool is_store, HOST_WIDE_INT size_in_bytes, int *nargs)
1396 {
1397 static enum built_in_function report[2][6]
1398 = { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1399 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1400 BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1401 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1402 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1403 BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } };
1404 if (size_in_bytes == -1)
1405 {
1406 *nargs = 2;
1407 return builtin_decl_implicit (report[is_store][5]);
1408 }
1409 *nargs = 1;
1410 return builtin_decl_implicit (report[is_store][exact_log2 (size_in_bytes)]);
1411 }
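/* For example, report_error_func (/*is_store=*/true, 4, &nargs) returns
   the decl of __asan_report_store4 and sets *NARGS to 1, while a
   variable-size access (SIZE_IN_BYTES == -1) maps to
   __asan_report_store_n with *NARGS == 2 (address plus actual size).  */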
1412
1413 /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1414 IS_STORE is either 1 (for a store) or 0 (for a load). */
1415
1416 static tree
1417 check_func (bool is_store, int size_in_bytes, int *nargs)
1418 {
1419 static enum built_in_function check[2][6]
1420 = { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
1421 BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
1422 BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
1423 { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
1424 BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
1425 BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } };
1426 if (size_in_bytes == -1)
1427 {
1428 *nargs = 2;
1429 return builtin_decl_implicit (check[is_store][5]);
1430 }
1431 *nargs = 1;
1432 return builtin_decl_implicit (check[is_store][exact_log2 (size_in_bytes)]);
1433 }
1434
1435 /* Split the current basic block and create a condition statement
1436 insertion point right before or after the statement pointed to by
1437 ITER. Return an iterator to the point at which the caller might
1438 safely insert the condition statement.
1439
1440 THEN_BLOCK must be set to the address of an uninitialized instance
1441 of basic_block. The function will then set *THEN_BLOCK to the
1442 'then block' of the condition statement to be inserted by the
1443 caller.
1444
1445 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1446 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1447
 1448    Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1449 block' of the condition statement to be inserted by the caller.
1450
1451 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1452 statements starting from *ITER, and *THEN_BLOCK is a new empty
1453 block.
1454
 1455    *ITER is adjusted to always point to the first statement
 1456    of the basic block *FALLTHROUGH_BLOCK. That statement is the
 1457    same as what ITER was pointing to prior to calling this function
 1458    if BEFORE_P is true; otherwise, it is the statement following it. */
1459
1460 gimple_stmt_iterator
1461 create_cond_insert_point (gimple_stmt_iterator *iter,
1462 bool before_p,
1463 bool then_more_likely_p,
1464 bool create_then_fallthru_edge,
1465 basic_block *then_block,
1466 basic_block *fallthrough_block)
1467 {
1468 gimple_stmt_iterator gsi = *iter;
1469
1470 if (!gsi_end_p (gsi) && before_p)
1471 gsi_prev (&gsi);
1472
1473 basic_block cur_bb = gsi_bb (*iter);
1474
1475 edge e = split_block (cur_bb, gsi_stmt (gsi));
1476
1477 /* Get a hold on the 'condition block', the 'then block' and the
1478 'else block'. */
1479 basic_block cond_bb = e->src;
1480 basic_block fallthru_bb = e->dest;
1481 basic_block then_bb = create_empty_bb (cond_bb);
1482 if (current_loops)
1483 {
1484 add_bb_to_loop (then_bb, cond_bb->loop_father);
1485 loops_state_set (LOOPS_NEED_FIXUP);
1486 }
1487
1488 /* Set up the newly created 'then block'. */
1489 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1490 int fallthrough_probability
1491 = then_more_likely_p
1492 ? PROB_VERY_UNLIKELY
1493 : PROB_ALWAYS - PROB_VERY_UNLIKELY;
1494 e->probability = PROB_ALWAYS - fallthrough_probability;
1495 if (create_then_fallthru_edge)
1496 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1497
1498 /* Set up the fallthrough basic block. */
1499 e = find_edge (cond_bb, fallthru_bb);
1500 e->flags = EDGE_FALSE_VALUE;
1501 e->count = cond_bb->count;
1502 e->probability = fallthrough_probability;
1503
1504 /* Update dominance info for the newly created then_bb; note that
1505 fallthru_bb's dominance info has already been updated by
 1506      split_block. */
1507 if (dom_info_available_p (CDI_DOMINATORS))
1508 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1509
1510 *then_block = then_bb;
1511 *fallthrough_block = fallthru_bb;
1512 *iter = gsi_start_bb (fallthru_bb);
1513
1514 return gsi_last_bb (cond_bb);
1515 }
1516
1517 /* Insert an if condition followed by a 'then block' right before the
1518 statement pointed to by ITER. The fallthrough block -- which is the
1519 else block of the condition as well as the destination of the
 1520    outgoing edge of the 'then block' -- starts with the statement
1521 pointed to by ITER.
1522
1523 COND is the condition of the if.
1524
1525 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1526 'then block' is higher than the probability of the edge to the
1527 fallthrough block.
1528
1529 Upon completion of the function, *THEN_BB is set to the newly
1530 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1531 fallthrough block.
1532
1533 *ITER is adjusted to still point to the same statement it was
1534 pointing to initially. */
1535
1536 static void
1537 insert_if_then_before_iter (gimple cond,
1538 gimple_stmt_iterator *iter,
1539 bool then_more_likely_p,
1540 basic_block *then_bb,
1541 basic_block *fallthrough_bb)
1542 {
1543 gimple_stmt_iterator cond_insert_point =
1544 create_cond_insert_point (iter,
1545 /*before_p=*/true,
1546 then_more_likely_p,
1547 /*create_then_fallthru_edge=*/true,
1548 then_bb,
1549 fallthrough_bb);
1550 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1551 }
1552
1553 /* Build
1554 (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset (). */
1555
1556 static tree
1557 build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
1558 tree base_addr, tree shadow_ptr_type)
1559 {
1560 tree t, uintptr_type = TREE_TYPE (base_addr);
1561 tree shadow_type = TREE_TYPE (shadow_ptr_type);
1562 gimple g;
1563
1564 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
1565 g = gimple_build_assign_with_ops (RSHIFT_EXPR,
1566 make_ssa_name (uintptr_type, NULL),
1567 base_addr, t);
1568 gimple_set_location (g, location);
1569 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1570
1571 t = build_int_cst (uintptr_type, asan_shadow_offset ());
1572 g = gimple_build_assign_with_ops (PLUS_EXPR,
1573 make_ssa_name (uintptr_type, NULL),
1574 gimple_assign_lhs (g), t);
1575 gimple_set_location (g, location);
1576 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1577
1578 g = gimple_build_assign_with_ops (NOP_EXPR,
1579 make_ssa_name (shadow_ptr_type, NULL),
1580 gimple_assign_lhs (g), NULL_TREE);
1581 gimple_set_location (g, location);
1582 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1583
1584 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1585 build_int_cst (shadow_ptr_type, 0));
1586 g = gimple_build_assign_with_ops (MEM_REF,
1587 make_ssa_name (shadow_type, NULL),
1588 t, NULL_TREE);
1589 gimple_set_location (g, location);
1590 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1591 return gimple_assign_lhs (g);
1592 }
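/* With ASAN_SHADOW_SHIFT == 3 and a shadow offset of 0x7fff8000 (the
   typical x86_64 value), the sequence emitted above looks roughly like:

     _1 = base_addr >> 3;
     _2 = _1 + 0x7fff8000;
     _3 = (signed char *) _2;
     _4 = MEM[(signed char *)_3];

   with _4, the loaded shadow value, returned to the caller.  */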
1593
1594 /* BASE can already be an SSA_NAME; in that case, do not create a
1595 new SSA_NAME for it. */
1596
1597 static tree
1598 maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
1599 bool before_p)
1600 {
1601 if (TREE_CODE (base) == SSA_NAME)
1602 return base;
1603 gimple g
1604 = gimple_build_assign_with_ops (TREE_CODE (base),
1605 make_ssa_name (TREE_TYPE (base), NULL),
1606 base, NULL_TREE);
1607 gimple_set_location (g, loc);
1608 if (before_p)
1609 gsi_insert_before (iter, g, GSI_SAME_STMT);
1610 else
1611 gsi_insert_after (iter, g, GSI_NEW_STMT);
1612 return gimple_assign_lhs (g);
1613 }
1614
 1615 /* LEN can already have the necessary size and precision;
1616 in that case, do not create a new variable. */
1617
1618 tree
1619 maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
1620 bool before_p)
1621 {
1622 if (ptrofftype_p (len))
1623 return len;
1624 gimple g
1625 = gimple_build_assign_with_ops (NOP_EXPR,
1626 make_ssa_name (pointer_sized_int_node, NULL),
1627 len, NULL);
1628 gimple_set_location (g, loc);
1629 if (before_p)
1630 gsi_insert_before (iter, g, GSI_SAME_STMT);
1631 else
1632 gsi_insert_after (iter, g, GSI_NEW_STMT);
1633 return gimple_assign_lhs (g);
1634 }
1635
1636 /* Instrument the memory access instruction BASE. Insert new
1637 statements before or after ITER.
1638
1639 Note that the memory access represented by BASE can be either an
1640 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1641 location. IS_STORE is TRUE for a store, FALSE for a load.
1642 BEFORE_P is TRUE for inserting the instrumentation code before
1643 ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
1644 for a scalar memory access and FALSE for memory region access.
 1645    IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have
 1646    non-zero length. ALIGN tells the alignment of the accessed memory object.
1647
1648 START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
1649 memory region have already been instrumented.
1650
1651 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1652 statement it was pointing to prior to calling this function,
1653 otherwise, it points to the statement logically following it. */
1654
1655 static void
1656 build_check_stmt (location_t loc, tree base, tree len,
1657 HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
1658 bool is_non_zero_len, bool before_p, bool is_store,
1659 bool is_scalar_access, unsigned int align = 0,
1660 bool start_instrumented = false,
1661 bool end_instrumented = false)
1662 {
1663 gimple_stmt_iterator gsi = *iter;
1664 gimple g;
1665
1666 gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
1667
1668 if (start_instrumented && end_instrumented)
1669 {
1670 if (!before_p)
1671 gsi_next (iter);
1672 return;
1673 }
1674
1675 gsi = *iter;
1676
1677 base = unshare_expr (base);
1678 base = maybe_create_ssa_name (loc, base, &gsi, before_p);
1679
1680 if (len)
1681 {
1682 len = unshare_expr (len);
1683 len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
1684 }
1685 else
1686 {
1687 gcc_assert (size_in_bytes != -1);
1688 len = build_int_cst (pointer_sized_int_node, size_in_bytes);
1689 }
1690
1691 if (size_in_bytes > 1)
1692 {
1693 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1694 || size_in_bytes > 16)
1695 is_scalar_access = false;
1696 else if (align && align < size_in_bytes * BITS_PER_UNIT)
1697 {
1698 /* On non-strict alignment targets, if
1699 16-byte access is just 8-byte aligned,
1700 this will result in misaligned shadow
1701 memory 2 byte load, but otherwise can
1702 be handled using one read. */
1703 if (size_in_bytes != 16
1704 || STRICT_ALIGNMENT
1705 || align < 8 * BITS_PER_UNIT)
1706 is_scalar_access = false;
1707 }
1708 }
1709
1710 HOST_WIDE_INT flags = 0;
1711 if (is_store)
1712 flags |= ASAN_CHECK_STORE;
1713 if (is_non_zero_len)
1714 flags |= ASAN_CHECK_NON_ZERO_LEN;
1715 if (is_scalar_access)
1716 flags |= ASAN_CHECK_SCALAR_ACCESS;
1717 if (start_instrumented)
1718 flags |= ASAN_CHECK_START_INSTRUMENTED;
1719 if (end_instrumented)
1720 flags |= ASAN_CHECK_END_INSTRUMENTED;
1721
1722 g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
1723 build_int_cst (integer_type_node, flags),
1724 base, len,
1725 build_int_cst (integer_type_node,
1726 align / BITS_PER_UNIT));
1727 gimple_set_location (g, loc);
1728 if (before_p)
1729 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1730 else
1731 {
1732 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1733 gsi_next (&gsi);
1734 *iter = gsi;
1735 }
1736 }
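
/* For illustration only (editor's sketch, pseudo-GIMPLE rather than
   compilable code): an aligned 4-byte store *p_1 = x_2 run through
   build_check_stmt above becomes roughly

     IFN_ASAN_CHECK ((ASAN_CHECK_STORE | ASAN_CHECK_NON_ZERO_LEN
		      | ASAN_CHECK_SCALAR_ACCESS), p_1, 4, 4);
     *p_1 = x_2;

   i.e. the flags word, the base address, the length and the byte
   alignment, matching the four arguments built for IFN_ASAN_CHECK.
   The internal call is expanded later by the sanopt pass; see
   asan_expand_check_ifn below.  */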
1737
1738 /* If T represents a memory access, add instrumentation code before ITER.
1739 LOCATION is source code location.
1740 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
1741
1742 static void
1743 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1744 location_t location, bool is_store)
1745 {
1746 if (is_store && !ASAN_INSTRUMENT_WRITES)
1747 return;
1748 if (!is_store && !ASAN_INSTRUMENT_READS)
1749 return;
1750
1751 tree type, base;
1752 HOST_WIDE_INT size_in_bytes;
1753
1754 type = TREE_TYPE (t);
1755 switch (TREE_CODE (t))
1756 {
1757 case ARRAY_REF:
1758 case COMPONENT_REF:
1759 case INDIRECT_REF:
1760 case MEM_REF:
1761 case VAR_DECL:
1762 case BIT_FIELD_REF:
1763 break;
1765 default:
1766 return;
1767 }
1768
1769 size_in_bytes = int_size_in_bytes (type);
1770 if (size_in_bytes <= 0)
1771 return;
1772
1773 HOST_WIDE_INT bitsize, bitpos;
1774 tree offset;
1775 enum machine_mode mode;
1776 int volatilep = 0, unsignedp = 0;
1777 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
1778 &mode, &unsignedp, &volatilep, false);
1779
1780 if (TREE_CODE (t) == COMPONENT_REF
1781 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1782 {
1783 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1784 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1785 TREE_OPERAND (t, 0), repr,
1786 NULL_TREE), location, is_store);
1787 return;
1788 }
1789
1790 if (bitpos % BITS_PER_UNIT
1791 || bitsize != size_in_bytes * BITS_PER_UNIT)
1792 return;
1793
1794 if (TREE_CODE (inner) == VAR_DECL
1795 && offset == NULL_TREE
1796 && bitpos >= 0
1797 && DECL_SIZE (inner)
1798 && tree_fits_shwi_p (DECL_SIZE (inner))
1799 && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
1800 {
1801 if (DECL_THREAD_LOCAL_P (inner))
1802 return;
1803 if (!TREE_STATIC (inner))
1804 {
1805 /* Automatic vars in the current function will always be
1806 accessible. */
1807 if (decl_function_context (inner) == current_function_decl)
1808 return;
1809 }
1810 /* Always instrument external vars, as they might be
1811 dynamically initialized. */
1812 else if (!DECL_EXTERNAL (inner))
1813 {
1814 /* Static vars that are known not to be dynamically
1815 initialized will always be accessible. */
1816 varpool_node *vnode = varpool_node::get (inner);
1817 if (vnode && !vnode->dynamically_initialized)
1818 return;
1819 }
1820 }
1821
1822 base = build_fold_addr_expr (t);
1823 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1824 {
1825 unsigned int align = get_object_alignment (t);
1826 build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
1827 /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
1828 is_store, /*is_scalar_access*/true, align);
1829 update_mem_ref_hash_table (base, size_in_bytes);
1830 update_mem_ref_hash_table (t, size_in_bytes);
1831 }
1833 }
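
/* Illustrative example (editor's sketch; assumes "g" is not
   dynamically initialized in any translation unit):

     static int g;
     int f (int *p) { return g + *p; }

   The dereference *p is instrumented, while the direct access to g
   is skipped by the known-safe-variable tests above once g's varpool
   node reports !dynamically_initialized.  */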
1834
1835 /* Instrument an access to a contiguous memory region that starts at
1836 the address pointed to by BASE, over a length of LEN (expressed in
1837 units of sizeof (*BASE) bytes). ITER points to the instruction before
1838 which the instrumentation instructions must be inserted. LOCATION
1839 is the source location that the instrumentation instructions must
1840 have. If IS_STORE is true, then the memory access is a store;
1841 otherwise, it's a load. */
1842
1843 static void
1844 instrument_mem_region_access (tree base, tree len,
1845 gimple_stmt_iterator *iter,
1846 location_t location, bool is_store)
1847 {
1848 if (!POINTER_TYPE_P (TREE_TYPE (base))
1849 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1850 || integer_zerop (len))
1851 return;
1852
1853 /* If the beginning of the memory region has already been
1854 instrumented, do not instrument it. */
1855 bool start_instrumented = has_mem_ref_been_instrumented (base, 1);
1856
1857 /* If the end of the memory region has already been instrumented, do
1858 not instrument it. */
1859 tree end = asan_mem_ref_get_end (base, len);
1860 bool end_instrumented = has_mem_ref_been_instrumented (end, 1);
1861
1862 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1863
1864 build_check_stmt (location, base, len, size_in_bytes, iter,
1865 /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
1866 is_store, /*is_scalar_access*/false, /*align*/0,
1867 start_instrumented, end_instrumented);
1868
1869 update_mem_ref_hash_table (base, 1);
1870 if (size_in_bytes != -1)
1871 update_mem_ref_hash_table (end, 1);
1872
1873 *iter = gsi_for_stmt (gsi_stmt (*iter));
1874 }
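
/* Illustrative example (editor's sketch): for a call such as

     memset (p, 0, n);

   this function emits a single region check over [p, p + n), i.e.
   an IFN_ASAN_CHECK with the base pointer, the length cast to
   pointer-sized type and the scalar-access flag clear; sanopt later
   expands that into checks of the first and the last byte of the
   region.  */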
1875
1876 /* Instrument the call (to the builtin strlen function) pointed to by
1877 ITER.
1878
1879 This function instruments the access to the first byte of the
1880 argument, right before the call. After the call it instruments the
1881 access to the last byte of the argument; it uses the result of the
1882 call to deduce the offset of that last byte.
1883
1884 Upon completion, iff the call has actually been instrumented, this
1885 function returns TRUE and *ITER points to the statement logically
1886 following the built-in strlen function call *ITER was initially
1887 pointing to. Otherwise, the function returns FALSE and *ITER
1888 remains unchanged. */
1889
1890 static bool
1891 instrument_strlen_call (gimple_stmt_iterator *iter)
1892 {
1893 gimple g;
1894 gimple call = gsi_stmt (*iter);
1895 gcc_assert (is_gimple_call (call));
1896
1897 tree callee = gimple_call_fndecl (call);
1898 gcc_assert (is_builtin_fn (callee)
1899 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
1900 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN);
1901
1902 location_t loc = gimple_location (call);
1903
1904 tree len = gimple_call_lhs (call);
1905 if (len == NULL)
1906 /* Some passes might clear the return value of the strlen call;
1907 bail out in that case. Return FALSE as we are not advancing
1908 *ITER. */
1909 return false;
1910 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (len)));
1911
1912 len = maybe_cast_to_ptrmode (loc, len, iter, /*before_p*/false);
1913
1914 tree str_arg = gimple_call_arg (call, 0);
1915 bool start_instrumented = has_mem_ref_been_instrumented (str_arg, 1);
1916
1917 tree cptr_type = build_pointer_type (char_type_node);
1918 g = gimple_build_assign_with_ops (NOP_EXPR,
1919 make_ssa_name (cptr_type, NULL),
1920 str_arg, NULL);
1921 gimple_set_location (g, loc);
1922 gsi_insert_before (iter, g, GSI_SAME_STMT);
1923 str_arg = gimple_assign_lhs (g);
1924
1925 build_check_stmt (loc, str_arg, NULL_TREE, 1, iter,
1926 /*is_non_zero_len*/true, /*before_p=*/true,
1927 /*is_store=*/false, /*is_scalar_access*/true, /*align*/0,
1928 start_instrumented, start_instrumented);
1929
1930 g = gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
1931 make_ssa_name (cptr_type, NULL),
1932 str_arg,
1933 len);
1934 gimple_set_location (g, loc);
1935 gsi_insert_after (iter, g, GSI_NEW_STMT);
1936
1937 build_check_stmt (loc, gimple_assign_lhs (g), NULL_TREE, 1, iter,
1938 /*is_non_zero_len*/true, /*before_p=*/false,
1939 /*is_store=*/false, /*is_scalar_access*/true, /*align*/0);
1940
1941 return true;
1942 }
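
/* Illustrative example (editor's sketch): for

     n = strlen (s);

   the code above checks the byte s[0] just before the call and,
   using the returned length, the byte s[n] (the terminating NUL)
   just after it.  */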
1943
1944 /* Instrument the call to a built-in memory access function that is
1945 pointed to by the iterator ITER.
1946
1947 Upon completion, return TRUE iff *ITER has been advanced to the
1948 statement following the one it was originally pointing to. */
1949
1950 static bool
1951 instrument_builtin_call (gimple_stmt_iterator *iter)
1952 {
1953 if (!ASAN_MEMINTRIN)
1954 return false;
1955
1956 bool iter_advanced_p = false;
1957 gimple call = gsi_stmt (*iter);
1958
1959 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
1960
1961 tree callee = gimple_call_fndecl (call);
1962 location_t loc = gimple_location (call);
1963
1964 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN)
1965 iter_advanced_p = instrument_strlen_call (iter);
1966 else
1967 {
1968 asan_mem_ref src0, src1, dest;
1969 asan_mem_ref_init (&src0, NULL, 1);
1970 asan_mem_ref_init (&src1, NULL, 1);
1971 asan_mem_ref_init (&dest, NULL, 1);
1972
1973 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1974 bool src0_is_store = false, src1_is_store = false,
1975 dest_is_store = false, dest_is_deref = false;
1976
1977 if (get_mem_refs_of_builtin_call (call,
1978 &src0, &src0_len, &src0_is_store,
1979 &src1, &src1_len, &src1_is_store,
1980 &dest, &dest_len, &dest_is_store,
1981 &dest_is_deref))
1982 {
1983 if (dest_is_deref)
1984 {
1985 instrument_derefs (iter, dest.start, loc, dest_is_store);
1986 gsi_next (iter);
1987 iter_advanced_p = true;
1988 }
1989 else if (src0_len || src1_len || dest_len)
1990 {
1991 if (src0.start != NULL_TREE)
1992 instrument_mem_region_access (src0.start, src0_len,
1993 iter, loc, /*is_store=*/false);
1994 if (src1.start != NULL_TREE)
1995 instrument_mem_region_access (src1.start, src1_len,
1996 iter, loc, /*is_store=*/false);
1997 if (dest.start != NULL_TREE)
1998 instrument_mem_region_access (dest.start, dest_len,
1999 iter, loc, /*is_store=*/true);
2000 *iter = gsi_for_stmt (call);
2001 gsi_next (iter);
2002 iter_advanced_p = true;
2003 }
2004 }
2005 }
2006 return iter_advanced_p;
2007 }
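
/* Illustrative example (editor's sketch): for

     memcpy (d, s, n);

   get_mem_refs_of_builtin_call reports a source region (s, n) and a
   destination region (d, n), so the code above emits a read check
   over [s, s + n) and a write check over [d, d + n).  */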
2008
2009 /* Instrument the assignment statement pointed to by ITER if it is
2010 subject to instrumentation. Return TRUE iff instrumentation
2011 actually happened. In that case, the iterator ITER is advanced to
2012 the next logical expression following the one initially pointed to
2013 by ITER, and the relevant memory reference whose access has been
2014 instrumented is added to the memory references hash table. */
2015
2016 static bool
2017 maybe_instrument_assignment (gimple_stmt_iterator *iter)
2018 {
2019 gimple s = gsi_stmt (*iter);
2020
2021 gcc_assert (gimple_assign_single_p (s));
2022
2023 tree ref_expr = NULL_TREE;
2024 bool is_store, is_instrumented = false;
2025
2026 if (gimple_store_p (s))
2027 {
2028 ref_expr = gimple_assign_lhs (s);
2029 is_store = true;
2030 instrument_derefs (iter, ref_expr,
2031 gimple_location (s),
2032 is_store);
2033 is_instrumented = true;
2034 }
2035
2036 if (gimple_assign_load_p (s))
2037 {
2038 ref_expr = gimple_assign_rhs1 (s);
2039 is_store = false;
2040 instrument_derefs (iter, ref_expr,
2041 gimple_location (s),
2042 is_store);
2043 is_instrumented = true;
2044 }
2045
2046 if (is_instrumented)
2047 gsi_next (iter);
2048
2049 return is_instrumented;
2050 }
2051
2052 /* Instrument the function call pointed to by the iterator ITER, if it
2053 is subject to instrumentation. At the moment, the only function
2054 calls that are instrumented are some built-in functions that access
2055 memory. Look at instrument_builtin_call to learn more.
2056
2057 Upon completion return TRUE iff *ITER was advanced to the statement
2058 following the one it was originally pointing to. */
2059
2060 static bool
2061 maybe_instrument_call (gimple_stmt_iterator *iter)
2062 {
2063 gimple stmt = gsi_stmt (*iter);
2064 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2065
2066 if (is_builtin && instrument_builtin_call (iter))
2067 return true;
2068
2069 if (gimple_call_noreturn_p (stmt))
2070 {
2071 if (is_builtin)
2072 {
2073 tree callee = gimple_call_fndecl (stmt);
2074 switch (DECL_FUNCTION_CODE (callee))
2075 {
2076 case BUILT_IN_UNREACHABLE:
2077 case BUILT_IN_TRAP:
2078 /* Don't instrument these. */
2079 return false;
2080 default:
2081 break;
2082 }
2083 }
2084 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2085 gimple g = gimple_build_call (decl, 0);
2086 gimple_set_location (g, gimple_location (stmt));
2087 gsi_insert_before (iter, g, GSI_SAME_STMT);
2088 }
2089 return false;
2090 }
2091
2092 /* Walk each instruction of all basic blocks and instrument those
2093 that represent memory references: loads, stores, or function calls.
2094 In a given basic block, this function avoids instrumenting memory
2095 references that have already been instrumented. */
2096
2097 static void
2098 transform_statements (void)
2099 {
2100 basic_block bb, last_bb = NULL;
2101 gimple_stmt_iterator i;
2102 int saved_last_basic_block = last_basic_block_for_fn (cfun);
2103
2104 FOR_EACH_BB_FN (bb, cfun)
2105 {
2106 basic_block prev_bb = bb;
2107
2108 if (bb->index >= saved_last_basic_block) continue;
2109
2110 /* Flush the mem ref hash table, if current bb doesn't have
2111 exactly one predecessor, or if that predecessor (skipping
2112 over asan created basic blocks) isn't the last processed
2113 basic block. Thus we effectively flush on extended basic
2114 block boundaries. */
2115 while (single_pred_p (prev_bb))
2116 {
2117 prev_bb = single_pred (prev_bb);
2118 if (prev_bb->index < saved_last_basic_block)
2119 break;
2120 }
2121 if (prev_bb != last_bb)
2122 empty_mem_ref_hash_table ();
2123 last_bb = bb;
2124
2125 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2126 {
2127 gimple s = gsi_stmt (i);
2128
2129 if (has_stmt_been_instrumented_p (s))
2130 gsi_next (&i);
2131 else if (gimple_assign_single_p (s)
2132 && !gimple_clobber_p (s)
2133 && maybe_instrument_assignment (&i))
2134 /* Nothing to do as maybe_instrument_assignment advanced
2135 the iterator I. */;
2136 else if (is_gimple_call (s) && maybe_instrument_call (&i))
2137 /* Nothing to do as maybe_instrument_call
2138 advanced the iterator I. */;
2139 else
2140 {
2141 /* No instrumentation happened.
2142
2143 If the current instruction is a function call that
2144 might free something, let's forget about the memory
2145 references that got instrumented. Otherwise we might
2146 miss some instrumentation opportunities. */
2147 if (is_gimple_call (s) && !nonfreeing_call_p (s))
2148 empty_mem_ref_hash_table ();
2149
2150 gsi_next (&i);
2151 }
2152 }
2153 }
2154 free_mem_ref_resources ();
2155 }
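
/* Illustrative example (editor's sketch): in

     *p = 1;
     *p = 2;

   both stores fall into the same extended basic block, so the second
   one finds *p in the mem ref hash table and is not re-instrumented.
   An intervening call that may free memory flushes the table and
   forces re-instrumentation.  */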
2156
2157 /* Build
2158 __asan_before_dynamic_init (module_name)
2159 or
2160 __asan_after_dynamic_init ()
2161 call. */
2162
2163 tree
2164 asan_dynamic_init_call (bool after_p)
2165 {
2166 tree fn = builtin_decl_implicit (after_p
2167 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2168 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2169 tree module_name_cst = NULL_TREE;
2170 if (!after_p)
2171 {
2172 pretty_printer module_name_pp;
2173 pp_string (&module_name_pp, main_input_filename);
2174
2175 if (shadow_ptr_types[0] == NULL_TREE)
2176 asan_init_shadow_ptr_types ();
2177 module_name_cst = asan_pp_string (&module_name_pp);
2178 module_name_cst = fold_convert (const_ptr_type_node,
2179 module_name_cst);
2180 }
2181
2182 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2183 }
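
/* Illustrative example (editor's sketch): for a C++ translation unit
   "foo.C" containing a dynamically initialized global, the module
   initializer is bracketed as

     __asan_before_dynamic_init ("foo.C");
     ... run the dynamic initializers ...
     __asan_after_dynamic_init ();

   which lets the run-time library detect initialization-order
   bugs.  */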
2184
2185 /* Build
2186 struct __asan_global
2187 {
2188 const void *__beg;
2189 uptr __size;
2190 uptr __size_with_redzone;
2191 const void *__name;
2192 const void *__module_name;
2193 uptr __has_dynamic_init;
2194 __asan_global_source_location *__location;
2195 } type. */
2196
2197 static tree
2198 asan_global_struct (void)
2199 {
2200 static const char *field_names[7]
2201 = { "__beg", "__size", "__size_with_redzone",
2202 "__name", "__module_name", "__has_dynamic_init", "__location"};
2203 tree fields[7], ret;
2204 int i;
2205
2206 ret = make_node (RECORD_TYPE);
2207 for (i = 0; i < 7; i++)
2208 {
2209 fields[i]
2210 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2211 get_identifier (field_names[i]),
2212 (i == 0 || i == 3) ? const_ptr_type_node
2213 : pointer_sized_int_node);
2214 DECL_CONTEXT (fields[i]) = ret;
2215 if (i)
2216 DECL_CHAIN (fields[i - 1]) = fields[i];
2217 }
2218 TYPE_FIELDS (ret) = fields[0];
2219 TYPE_NAME (ret) = get_identifier ("__asan_global");
2220 layout_type (ret);
2221 return ret;
2222 }
2223
2224 /* Append description of a single global DECL into vector V.
2225 TYPE is the __asan_global struct type as returned by asan_global_struct. */
2226
2227 static void
2228 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2229 {
2230 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2231 unsigned HOST_WIDE_INT size;
2232 tree str_cst, module_name_cst, refdecl = decl;
2233 vec<constructor_elt, va_gc> *vinner = NULL;
2234
2235 pretty_printer asan_pp, module_name_pp;
2236
2237 if (DECL_NAME (decl))
2238 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2239 else
2240 pp_string (&asan_pp, "<unknown>");
2241 str_cst = asan_pp_string (&asan_pp);
2242
2243 pp_string (&module_name_pp, main_input_filename);
2244 module_name_cst = asan_pp_string (&module_name_pp);
2245
2246 if (asan_needs_local_alias (decl))
2247 {
2248 char buf[20];
2249 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2250 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2251 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2252 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2253 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2254 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2255 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2256 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2257 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2258 TREE_STATIC (refdecl) = 1;
2259 TREE_PUBLIC (refdecl) = 0;
2260 TREE_USED (refdecl) = 1;
2261 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2262 }
2263
2264 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2265 fold_convert (const_ptr_type_node,
2266 build_fold_addr_expr (refdecl)));
2267 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2268 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2269 size += asan_red_zone_size (size);
2270 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2271 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2272 fold_convert (const_ptr_type_node, str_cst));
2273 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2274 fold_convert (const_ptr_type_node, module_name_cst));
2275 varpool_node *vnode = varpool_node::get (decl);
2276 int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2277 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2278 build_int_cst (uptr, has_dynamic_init));
2279 tree locptr = NULL_TREE;
2280 location_t loc = DECL_SOURCE_LOCATION (decl);
2281 expanded_location xloc = expand_location (loc);
2282 if (xloc.file != NULL)
2283 {
2284 static int lasanloccnt = 0;
2285 char buf[25];
2286 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2287 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2288 ubsan_get_source_location_type ());
2289 TREE_STATIC (var) = 1;
2290 TREE_PUBLIC (var) = 0;
2291 DECL_ARTIFICIAL (var) = 1;
2292 DECL_IGNORED_P (var) = 1;
2293 pretty_printer filename_pp;
2294 pp_string (&filename_pp, xloc.file);
2295 tree str = asan_pp_string (&filename_pp);
2296 tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2297 NULL_TREE, str, NULL_TREE,
2298 build_int_cst (unsigned_type_node,
2299 xloc.line), NULL_TREE,
2300 build_int_cst (unsigned_type_node,
2301 xloc.column));
2302 TREE_CONSTANT (ctor) = 1;
2303 TREE_STATIC (ctor) = 1;
2304 DECL_INITIAL (var) = ctor;
2305 varpool_node::finalize_decl (var);
2306 locptr = fold_convert (uptr, build_fold_addr_expr (var));
2307 }
2308 else
2309 locptr = build_int_cst (uptr, 0);
2310 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2311 init = build_constructor (type, vinner);
2312 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2313 }
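
/* Illustrative example (editor's sketch, assuming 4-byte int): for

     int tab[10];

   defined at file.c:3, the descriptor appended above is roughly

     { &tab, 40, 40 + <redzone>, "tab", "file.c", 0,
       &<location record for file.c:3> }

   where <redzone> is the padding computed by asan_red_zone_size.  */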
2314
2315 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2316 void
2317 initialize_sanitizer_builtins (void)
2318 {
2319 tree decl;
2320
2321 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2322 return;
2323
2324 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2325 tree BT_FN_VOID_PTR
2326 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2327 tree BT_FN_VOID_CONST_PTR
2328 = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2329 tree BT_FN_VOID_PTR_PTR
2330 = build_function_type_list (void_type_node, ptr_type_node,
2331 ptr_type_node, NULL_TREE);
2332 tree BT_FN_VOID_PTR_PTR_PTR
2333 = build_function_type_list (void_type_node, ptr_type_node,
2334 ptr_type_node, ptr_type_node, NULL_TREE);
2335 tree BT_FN_VOID_PTR_PTRMODE
2336 = build_function_type_list (void_type_node, ptr_type_node,
2337 pointer_sized_int_node, NULL_TREE);
2338 tree BT_FN_VOID_INT
2339 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2340 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2341 tree BT_FN_IX_CONST_VPTR_INT[5];
2342 tree BT_FN_IX_VPTR_IX_INT[5];
2343 tree BT_FN_VOID_VPTR_IX_INT[5];
2344 tree vptr
2345 = build_pointer_type (build_qualified_type (void_type_node,
2346 TYPE_QUAL_VOLATILE));
2347 tree cvptr
2348 = build_pointer_type (build_qualified_type (void_type_node,
2349 TYPE_QUAL_VOLATILE
2350 |TYPE_QUAL_CONST));
2351 tree boolt
2352 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2353 int i;
2354 for (i = 0; i < 5; i++)
2355 {
2356 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2357 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2358 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2359 integer_type_node, integer_type_node,
2360 NULL_TREE);
2361 BT_FN_IX_CONST_VPTR_INT[i]
2362 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2363 BT_FN_IX_VPTR_IX_INT[i]
2364 = build_function_type_list (ix, vptr, ix, integer_type_node,
2365 NULL_TREE);
2366 BT_FN_VOID_VPTR_IX_INT[i]
2367 = build_function_type_list (void_type_node, vptr, ix,
2368 integer_type_node, NULL_TREE);
2369 }
2370 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2371 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2372 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2373 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2374 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2375 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2376 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2377 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2378 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2379 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2380 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2381 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2382 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2383 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2384 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2385 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2386 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2387 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2388 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2389 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2390 #undef ATTR_NOTHROW_LEAF_LIST
2391 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2392 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2393 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2394 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2395 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2396 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2397 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2398 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2399 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2400 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2401 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2402 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2403 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2404 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2405 #undef DEF_SANITIZER_BUILTIN
2406 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2407 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2408 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2409 set_call_expr_flags (decl, ATTRS); \
2410 set_builtin_decl (ENUM, decl, true);
2411
2412 #include "sanitizer.def"
2413
2414 #undef DEF_SANITIZER_BUILTIN
2415 }
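
/* Illustrative expansion (editor's sketch; the builtin name is
   schematic): an entry in sanitizer.def such as

     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_INIT, "__asan_init_vN",
			    BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST)

   expands via the macro above to

     decl = add_builtin_function ("__builtin___asan_init_vN",
				  BT_FN_VOID, BUILT_IN_ASAN_INIT,
				  BUILT_IN_NORMAL, "__asan_init_vN",
				  NULL_TREE);
     set_call_expr_flags (decl, ECF_NOTHROW | ECF_LEAF);
     set_builtin_decl (BUILT_IN_ASAN_INIT, decl, true);  */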
2416
2417 /* Called via hash_table::traverse. Count the number of emitted
2418 STRING_CSTs in the constant hash table. */
2419
2420 int
2421 count_string_csts (constant_descriptor_tree **slot,
2422 unsigned HOST_WIDE_INT *data)
2423 {
2424 struct constant_descriptor_tree *desc = *slot;
2425 if (TREE_CODE (desc->value) == STRING_CST
2426 && TREE_ASM_WRITTEN (desc->value)
2427 && asan_protect_global (desc->value))
2428 ++*data;
2429 return 1;
2430 }
2431
2432 /* Helper structure to pass two parameters to
2433 add_string_csts. */
2434
2435 struct asan_add_string_csts_data
2436 {
2437 tree type;
2438 vec<constructor_elt, va_gc> *v;
2439 };
2440
2441 /* Called via hash_table::traverse. Call asan_add_global
2442 on emitted STRING_CSTs from the constant hash table. */
2443
2444 int
2445 add_string_csts (constant_descriptor_tree **slot,
2446 asan_add_string_csts_data *aascd)
2447 {
2448 struct constant_descriptor_tree *desc = *slot;
2449 if (TREE_CODE (desc->value) == STRING_CST
2450 && TREE_ASM_WRITTEN (desc->value)
2451 && asan_protect_global (desc->value))
2452 {
2453 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2454 aascd->type, aascd->v);
2455 }
2456 return 1;
2457 }
2458
2459 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2460 invoke ggc_collect. */
2461 static GTY(()) tree asan_ctor_statements;
2462
2463 /* Module-level instrumentation.
2464 - Insert __asan_init_vN() into the list of CTORs.
2465 - TODO: insert redzones around globals.
2466 */
2467
2468 void
2469 asan_finish_file (void)
2470 {
2471 varpool_node *vnode;
2472 unsigned HOST_WIDE_INT gcount = 0;
2473
2474 if (shadow_ptr_types[0] == NULL_TREE)
2475 asan_init_shadow_ptr_types ();
2476 /* Avoid instrumenting code in the asan ctors/dtors.
2477 We don't need to insert padding after the description strings,
2478 nor after .LASAN* array. */
2479 flag_sanitize &= ~SANITIZE_ADDRESS;
2480
2481 if (flag_sanitize & SANITIZE_USER_ADDRESS)
2482 {
2483 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2484 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2485 }
2486 FOR_EACH_DEFINED_VARIABLE (vnode)
2487 if (TREE_ASM_WRITTEN (vnode->decl)
2488 && asan_protect_global (vnode->decl))
2489 ++gcount;
2490 hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
2491 const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
2492 (&gcount);
2493 if (gcount)
2494 {
2495 tree type = asan_global_struct (), var, ctor;
2496 tree dtor_statements = NULL_TREE;
2497 vec<constructor_elt, va_gc> *v;
2498 char buf[20];
2499
2500 type = build_array_type_nelts (type, gcount);
2501 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2502 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2503 type);
2504 TREE_STATIC (var) = 1;
2505 TREE_PUBLIC (var) = 0;
2506 DECL_ARTIFICIAL (var) = 1;
2507 DECL_IGNORED_P (var) = 1;
2508 vec_alloc (v, gcount);
2509 FOR_EACH_DEFINED_VARIABLE (vnode)
2510 if (TREE_ASM_WRITTEN (vnode->decl)
2511 && asan_protect_global (vnode->decl))
2512 asan_add_global (vnode->decl, TREE_TYPE (type), v);
2513 struct asan_add_string_csts_data aascd;
2514 aascd.type = TREE_TYPE (type);
2515 aascd.v = v;
2516 const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
2517 (&aascd);
2518 ctor = build_constructor (type, v);
2519 TREE_CONSTANT (ctor) = 1;
2520 TREE_STATIC (ctor) = 1;
2521 DECL_INITIAL (var) = ctor;
2522 varpool_node::finalize_decl (var);
2523
2524 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2525 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
2526 append_to_statement_list (build_call_expr (fn, 2,
2527 build_fold_addr_expr (var),
2528 gcount_tree),
2529 &asan_ctor_statements);
2530
2531 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2532 append_to_statement_list (build_call_expr (fn, 2,
2533 build_fold_addr_expr (var),
2534 gcount_tree),
2535 &dtor_statements);
2536 cgraph_build_static_cdtor ('D', dtor_statements,
2537 MAX_RESERVED_INIT_PRIORITY - 1);
2538 }
2539 if (asan_ctor_statements)
2540 cgraph_build_static_cdtor ('I', asan_ctor_statements,
2541 MAX_RESERVED_INIT_PRIORITY - 1);
2542 flag_sanitize |= SANITIZE_ADDRESS;
2543 }
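
/* Illustrative result (editor's sketch, in rough C terms): for a
   translation unit with GCOUNT protected globals this emits

     static struct __asan_global .LASAN0[GCOUNT] = { ... };

   plus a constructor running at MAX_RESERVED_INIT_PRIORITY - 1 that
   calls __asan_init () and
   __asan_register_globals (&.LASAN0, GCOUNT), and a destructor at
   the same priority that calls
   __asan_unregister_globals (&.LASAN0, GCOUNT).  */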
2544
2545 /* Expand the IFN_ASAN_CHECK internal function call. */
2546
2547 static bool
2548 asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
2549 {
2550 gimple g = gsi_stmt (*iter);
2551 location_t loc = gimple_location (g);
2552
2553 HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
2554 gcc_assert (flags < ASAN_CHECK_LAST);
2555 bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
2556 bool is_store = (flags & ASAN_CHECK_STORE) != 0;
2557 bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
2558 bool start_instrumented = (flags & ASAN_CHECK_START_INSTRUMENTED) != 0;
2559 bool end_instrumented = (flags & ASAN_CHECK_END_INSTRUMENTED) != 0;
2560
2561 tree base = gimple_call_arg (g, 1);
2562 tree len = gimple_call_arg (g, 2);
2563 HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
2564
2565 HOST_WIDE_INT size_in_bytes
2566 = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2567
2568 if (use_calls)
2569 {
2570 /* Instrument using callbacks. */
2571 gimple g
2572 = gimple_build_assign_with_ops (NOP_EXPR,
2573 make_ssa_name (pointer_sized_int_node,
2574 NULL),
2575 base, NULL_TREE);
2576 gimple_set_location (g, loc);
2577 gsi_insert_before (iter, g, GSI_SAME_STMT);
2578 tree base_addr = gimple_assign_lhs (g);
2579
2580 int nargs;
2581 tree fun = check_func (is_store, size_in_bytes, &nargs);
2582 if (nargs == 1)
2583 g = gimple_build_call (fun, 1, base_addr);
2584 else
2585 {
2586 gcc_assert (nargs == 2);
2587 g = gimple_build_assign_with_ops (NOP_EXPR,
2588 make_ssa_name (pointer_sized_int_node,
2589 NULL),
2590 len, NULL_TREE);
2591 gimple_set_location (g, loc);
2592 gsi_insert_before (iter, g, GSI_SAME_STMT);
2593 tree sz_arg = gimple_assign_lhs (g);
2594 g = gimple_build_call (fun, nargs, base_addr, sz_arg);
2595 }
2596 gimple_set_location (g, loc);
2597 gsi_replace (iter, g, false);
2598 return false;
2599 }
2600
2601 HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
2602
2603 tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
2604 tree shadow_type = TREE_TYPE (shadow_ptr_type);
2605
2606 gimple_stmt_iterator gsi = *iter;
2607
2608 if (!is_non_zero_len)
2609 {
2610 /* The length of the memory area to asan-protect is not known
2611 to be non-zero. Guard the generated instrumentation code
2612 like:
2613
2614 if (len != 0)
2615 {
2616 // asan instrumentation code goes here.
2617 }
2618 // fallthrough instructions, starting with *ITER. */
2619
2620 g = gimple_build_cond (NE_EXPR,
2621 len,
2622 build_int_cst (TREE_TYPE (len), 0),
2623 NULL_TREE, NULL_TREE);
2624 gimple_set_location (g, loc);
2625
2626 basic_block then_bb, fallthrough_bb;
2627 insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
2628 &then_bb, &fallthrough_bb);
2629 /* Note that fallthrough_bb starts with the statement that was
2630 pointed to by ITER. */
2631
2632 /* The 'then block' of the 'if (len != 0)' condition is where
2633 we'll generate the asan instrumentation code now. */
2634 gsi = gsi_last_bb (then_bb);
2635 }
2636
2637 /* Get an iterator on the point where we can add the condition
2638 statement for the instrumentation. */
2639 basic_block then_bb, else_bb;
2640 gsi = create_cond_insert_point (&gsi, /*before_p*/false,
2641 /*then_more_likely_p=*/false,
2642 /*create_then_fallthru_edge=*/false,
2643 &then_bb,
2644 &else_bb);
2645
2646 g = gimple_build_assign_with_ops (NOP_EXPR,
2647 make_ssa_name (pointer_sized_int_node,
2648 NULL),
2649 base, NULL_TREE);
2650 gimple_set_location (g, loc);
2651 gsi_insert_before (&gsi, g, GSI_NEW_STMT);
2652 tree base_addr = gimple_assign_lhs (g);
2653
2654 tree t = NULL_TREE;
2655 if (real_size_in_bytes >= 8)
2656 {
2657 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2658 shadow_ptr_type);
2659 t = shadow;
2660 }
2661 else
2662 {
2663 /* Slow path for 1, 2 and 4 byte accesses. */
2664
2665 if (!start_instrumented)
2666 {
2667 /* Test (shadow != 0)
2668 & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow). */
2669 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2670 shadow_ptr_type);
2671 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2672 gimple_seq seq = NULL;
2673 gimple_seq_add_stmt (&seq, shadow_test);
2674 /* Accesses aligned to >= 8 bytes can test just
2675 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
2676 to be 0. */
2677 if (align < 8)
2678 {
2679 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2680 base_addr, 7));
2681 gimple_seq_add_stmt (&seq,
2682 build_type_cast (shadow_type,
2683 gimple_seq_last (seq)));
2684 if (real_size_in_bytes > 1)
2685 gimple_seq_add_stmt (&seq,
2686 build_assign (PLUS_EXPR,
2687 gimple_seq_last (seq),
2688 real_size_in_bytes - 1));
2689 t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
2690 }
2691 else
2692 t = build_int_cst (shadow_type, real_size_in_bytes - 1);
2693 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
2694 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2695 gimple_seq_last (seq)));
2696 t = gimple_assign_lhs (gimple_seq_last (seq));
2697 gimple_seq_set_location (seq, loc);
2698 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2699 }
2700
2701 /* For non-constant, misaligned or otherwise weird access sizes,
2702 check first and last byte. */
2703 if (size_in_bytes == -1 && !end_instrumented)
2704 {
2705 g = gimple_build_assign_with_ops (MINUS_EXPR,
2706 make_ssa_name (pointer_sized_int_node, NULL),
2707 len,
2708 build_int_cst (pointer_sized_int_node, 1));
2709 gimple_set_location (g, loc);
2710 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2711 tree last = gimple_assign_lhs (g);
2712 g = gimple_build_assign_with_ops (PLUS_EXPR,
2713 make_ssa_name (pointer_sized_int_node, NULL),
2714 base_addr,
2715 last);
2716 gimple_set_location (g, loc);
2717 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2718 tree base_end_addr = gimple_assign_lhs (g);
2719
2720 tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
2721 shadow_ptr_type);
2722 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2723 gimple_seq seq = NULL;
2724 gimple_seq_add_stmt (&seq, shadow_test);
2725 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2726 base_end_addr, 7));
2727 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
2728 gimple_seq_last (seq)));
2729 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
2730 gimple_seq_last (seq),
2731 shadow));
2732 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2733 gimple_seq_last (seq)));
2734 if (!start_instrumented)
2735 gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
2736 gimple_seq_last (seq)));
2737 t = gimple_assign_lhs (gimple_seq_last (seq));
2738 gimple_seq_set_location (seq, loc);
2739 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2740 }
2741 }
2742
2743 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
2744 NULL_TREE, NULL_TREE);
2745 gimple_set_location (g, loc);
2746 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2747
2748 /* Generate call to the run-time library (e.g. __asan_report_load8). */
2749 gsi = gsi_start_bb (then_bb);
2750 int nargs;
2751 tree fun = report_error_func (is_store, size_in_bytes, &nargs);
2752 g = gimple_build_call (fun, nargs, base_addr, len);
2753 gimple_set_location (g, loc);
2754 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2755
2756 gsi_remove (iter, true);
2757 *iter = gsi_start_bb (else_bb);
2758
2759 return true;
2760 }
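
/* Illustrative expansions (editor's sketch, pseudo-GIMPLE): a 4-byte
   load check IFN_ASAN_CHECK (flags, p_1, 4, 4) becomes, when
   USE_CALLS is set,

     __asan_load4 ((uintptr_t) p_1);

   and otherwise an inline test of the corresponding shadow byte that
   branches to __asan_report_load4 on failure.  */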
2761
2762 /* Instrument the current function. */
2763
2764 static unsigned int
2765 asan_instrument (void)
2766 {
2767 if (shadow_ptr_types[0] == NULL_TREE)
2768 asan_init_shadow_ptr_types ();
2769 transform_statements ();
2770 return 0;
2771 }
2772
2773 static bool
2774 gate_asan (void)
2775 {
2776 return (flag_sanitize & SANITIZE_ADDRESS) != 0
2777 && !lookup_attribute ("no_sanitize_address",
2778 DECL_ATTRIBUTES (current_function_decl));
2779 }
2780
2781 namespace {
2782
2783 const pass_data pass_data_asan =
2784 {
2785 GIMPLE_PASS, /* type */
2786 "asan", /* name */
2787 OPTGROUP_NONE, /* optinfo_flags */
2788 TV_NONE, /* tv_id */
2789 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2790 0, /* properties_provided */
2791 0, /* properties_destroyed */
2792 0, /* todo_flags_start */
2793 TODO_update_ssa, /* todo_flags_finish */
2794 };
2795
2796 class pass_asan : public gimple_opt_pass
2797 {
2798 public:
2799 pass_asan (gcc::context *ctxt)
2800 : gimple_opt_pass (pass_data_asan, ctxt)
2801 {}
2802
2803 /* opt_pass methods: */
2804 opt_pass * clone () { return new pass_asan (m_ctxt); }
2805 virtual bool gate (function *) { return gate_asan (); }
2806 virtual unsigned int execute (function *) { return asan_instrument (); }
2807
2808 }; // class pass_asan
2809
2810 } // anon namespace
2811
2812 gimple_opt_pass *
2813 make_pass_asan (gcc::context *ctxt)
2814 {
2815 return new pass_asan (ctxt);
2816 }
2817
2818 namespace {
2819
2820 const pass_data pass_data_asan_O0 =
2821 {
2822 GIMPLE_PASS, /* type */
2823 "asan0", /* name */
2824 OPTGROUP_NONE, /* optinfo_flags */
2825 TV_NONE, /* tv_id */
2826 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2827 0, /* properties_provided */
2828 0, /* properties_destroyed */
2829 0, /* todo_flags_start */
2830 TODO_update_ssa, /* todo_flags_finish */
2831 };
2832
2833 class pass_asan_O0 : public gimple_opt_pass
2834 {
2835 public:
2836 pass_asan_O0 (gcc::context *ctxt)
2837 : gimple_opt_pass (pass_data_asan_O0, ctxt)
2838 {}
2839
2840 /* opt_pass methods: */
2841 virtual bool gate (function *) { return !optimize && gate_asan (); }
2842 virtual unsigned int execute (function *) { return asan_instrument (); }
2843
2844 }; // class pass_asan_O0
2845
2846 } // anon namespace
2847
2848 gimple_opt_pass *
2849 make_pass_asan_O0 (gcc::context *ctxt)
2850 {
2851 return new pass_asan_O0 (ctxt);
2852 }
2853
2854 /* Perform optimization of sanitize functions. */
2855
2856 namespace {
2857
2858 const pass_data pass_data_sanopt =
2859 {
2860 GIMPLE_PASS, /* type */
2861 "sanopt", /* name */
2862 OPTGROUP_NONE, /* optinfo_flags */
2863 TV_NONE, /* tv_id */
2864 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2865 0, /* properties_provided */
2866 0, /* properties_destroyed */
2867 0, /* todo_flags_start */
2868 TODO_update_ssa, /* todo_flags_finish */
2869 };
2870
2871 class pass_sanopt : public gimple_opt_pass
2872 {
2873 public:
2874 pass_sanopt (gcc::context *ctxt)
2875 : gimple_opt_pass (pass_data_sanopt, ctxt)
2876 {}
2877
2878 /* opt_pass methods: */
2879 virtual bool gate (function *) { return flag_sanitize; }
2880 virtual unsigned int execute (function *);
2881
2882 }; // class pass_sanopt
2883
2884 unsigned int
2885 pass_sanopt::execute (function *fun)
2886 {
2887 basic_block bb;
2888
2889 int asan_num_accesses = 0;
2890 if (flag_sanitize & SANITIZE_ADDRESS)
2891 {
2892 gimple_stmt_iterator gsi;
2893 FOR_EACH_BB_FN (bb, fun)
2894 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2895 {
2896 gimple stmt = gsi_stmt (gsi);
2897 if (is_gimple_call (stmt) && gimple_call_internal_p (stmt)
2898 && gimple_call_internal_fn (stmt) == IFN_ASAN_CHECK)
2899 ++asan_num_accesses;
2900 }
2901 }
2902
2903 bool use_calls = ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD < INT_MAX
2904 && asan_num_accesses >= ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD;
2905
2906 FOR_EACH_BB_FN (bb, fun)
2907 {
2908 gimple_stmt_iterator gsi;
2909 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
2910 {
2911 gimple stmt = gsi_stmt (gsi);
2912 bool no_next = false;
2913
2914 if (!is_gimple_call (stmt))
2915 {
2916 gsi_next (&gsi);
2917 continue;
2918 }
2919
2920 if (gimple_call_internal_p (stmt))
2921 {
2922 enum internal_fn ifn = gimple_call_internal_fn (stmt);
2923 switch (ifn)
2924 {
2925 case IFN_UBSAN_NULL:
2926 no_next = ubsan_expand_null_ifn (&gsi);
2927 break;
2928 case IFN_UBSAN_BOUNDS:
2929 no_next = ubsan_expand_bounds_ifn (&gsi);
2930 break;
2931 case IFN_UBSAN_OBJECT_SIZE:
2932 no_next = ubsan_expand_objsize_ifn (&gsi);
2933 break;
2934 case IFN_ASAN_CHECK:
2935 no_next = asan_expand_check_ifn (&gsi, use_calls);
2936 break;
2937 default:
2938 break;
2939 }
2940 }
2941
2942 if (dump_file && (dump_flags & TDF_DETAILS))
2943 {
2944 fprintf (dump_file, "Optimized\n ");
2945 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2946 fprintf (dump_file, "\n");
2947 }
2948
2949 if (!no_next)
2950 gsi_next (&gsi);
2951 }
2952 }
2953 return 0;
2954 }
2955
2956 } // anon namespace
2957
2958 gimple_opt_pass *
2959 make_pass_sanopt (gcc::context *ctxt)
2960 {
2961 return new pass_sanopt (ctxt);
2962 }
2963
2964 #include "gt-asan.h"