asan.c (report_error_func): Add SLOW_P argument, use BUILT_IN_ASAN_*_N if set.
1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2014 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tree.h"
26 #include "hash-table.h"
27 #include "basic-block.h"
28 #include "tree-ssa-alias.h"
29 #include "internal-fn.h"
30 #include "gimple-expr.h"
31 #include "is-a.h"
32 #include "gimple.h"
33 #include "gimplify.h"
34 #include "gimple-iterator.h"
35 #include "calls.h"
36 #include "varasm.h"
37 #include "stor-layout.h"
38 #include "tree-iterator.h"
39 #include "cgraph.h"
40 #include "stringpool.h"
41 #include "tree-ssanames.h"
42 #include "tree-pass.h"
43 #include "asan.h"
44 #include "gimple-pretty-print.h"
45 #include "target.h"
46 #include "expr.h"
47 #include "optabs.h"
48 #include "output.h"
49 #include "tm_p.h"
50 #include "langhooks.h"
51 #include "alloc-pool.h"
52 #include "cfgloop.h"
53 #include "gimple-builder.h"
54 #include "ubsan.h"
55 #include "predict.h"
56 #include "params.h"
57
58 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
59 with <2x slowdown on average.
60
61 The tool consists of two parts:
62 instrumentation module (this file) and a run-time library.
63 The instrumentation module adds a run-time check before every memory insn.
64    For an 8- or 16-byte load accessing address X:
65 ShadowAddr = (X >> 3) + Offset
66 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
67 if (ShadowValue)
68 __asan_report_load8(X);
69 For a load of N bytes (N=1, 2 or 4) from address X:
70 ShadowAddr = (X >> 3) + Offset
71 ShadowValue = *(char*)ShadowAddr;
72 if (ShadowValue)
73 if ((X & 7) + N - 1 > ShadowValue)
74 __asan_report_loadN(X);
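
   As a concrete illustration (a sketch only; the shadow Offset is
   target-dependent, 0x7fff8000 being the usual value on x86_64 Linux),
   a 4-byte load from X = 0x602010 would be checked roughly as:
     ShadowAddr = (0x602010 >> 3) + 0x7fff8000; // == 0x800b8402
     ShadowValue = *(char*)ShadowAddr;
     if (ShadowValue)
       if ((0x602010 & 7) + 4 - 1 > ShadowValue)
         __asan_report_load4 (0x602010);
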
75 Stores are instrumented similarly, but using __asan_report_storeN functions.
76    A call to __asan_init_vN() is inserted into the list of module CTORs.
77 N is the version number of the AddressSanitizer API. The changes between the
78 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
79
80    The run-time library redefines malloc (so that redzones are inserted around
81    the allocated memory) and free (so that reuse of freed memory is delayed),
82 provides __asan_report* and __asan_init_vN functions.
83
84 Read more:
85 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
86
87 The current implementation supports detection of out-of-bounds and
88 use-after-free in the heap, on the stack and for global variables.
89
90 [Protection of stack variables]
91
92 To understand how detection of out-of-bounds and use-after-free works
93    for stack variables, let's look at this example on x86_64 where the
94 stack grows downward:
95
96 int
97 foo ()
98 {
99 char a[23] = {0};
100 int b[2] = {0};
101
102 a[5] = 1;
103 b[1] = 2;
104
105 return a[5] + b[1];
106 }
107
108 For this function, the stack protected by asan will be organized as
109 follows, from the top of the stack to the bottom:
110
111 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
112
113    Slot 2/ [8 bytes of red zone, added to the space of 'a' to make
114            the next slot 32-byte aligned; this one is called the Partial
115            Redzone; this 32-byte alignment is an asan constraint]
116
117 Slot 3/ [24 bytes for variable 'a']
118
119 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
120
121    Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
122
123 Slot 6/ [8 bytes for variable 'b']
124
125 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
126 'LEFT RedZone']
127
128 The 32 bytes of LEFT red zone at the bottom of the stack can be
129 decomposed as such:
130
131 1/ The first 8 bytes contain a magical asan number that is always
132 0x41B58AB3.
133
134    2/ The following 8 bytes contain a pointer to a string (to be
135       parsed at run time by the asan run-time library), whose format is
136       the following:
137
138 "<function-name> <space> <num-of-variables-on-the-stack>
139 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
140 <length-of-var-in-bytes> ){n} "
141
142    where '(...){n}' means the content inside the parentheses occurs 'n'
143 times, with 'n' being the number of variables on the stack.
144
145 3/ The following 8 bytes contain the PC of the current function which
146 will be used by the run-time library to print an error message.
147
148 4/ The following 8 bytes are reserved for internal use by the run-time.
149
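   Put together, the LEFT red zone can be pictured as the following
   struct (a sketch only -- the field names are made up here, and the
   compiler emits these as raw pointer-sized stores, not as a struct):

     struct left_redzone
     {
       uptr magic;         // always ASAN_STACK_FRAME_MAGIC (0x41B58AB3)
       const char *descr;  // the frame description string from 2/ above
       uptr pc;            // PC of the function, for error reports
       uptr reserved;      // internal use by the run-time
     };
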
150 The shadow memory for that stack layout is going to look like this:
151
152 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
153 The F1 byte pattern is a magic number called
154 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
155       the memory for that shadow byte is part of the LEFT red zone
156       intended to sit at the bottom of the variables on the stack.
157
158 - content of shadow memory 8 bytes for slots 6 and 5:
159 0xF4F4F400. The F4 byte pattern is a magic number
160 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
161 memory region for this shadow byte is a PARTIAL red zone
162 intended to pad a variable A, so that the slot following
163 {A,padding} is 32 bytes aligned.
164
165 Note that the fact that the least significant byte of this
166 shadow memory content is 00 means that 8 bytes of its
167 corresponding memory (which corresponds to the memory of
168 variable 'b') is addressable.
169
170 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
171 The F2 byte pattern is a magic number called
172 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
173 region for this shadow byte is a MIDDLE red zone intended to
174       sit between two 32-byte aligned slots of {variable,padding}.
175
176 - content of shadow memory 8 bytes for slot 3 and 2:
177       0xF4000000. This is the concatenation of
178 variable 'a' and the partial red zone following it, like what we
179 had for variable 'b'. The least significant 3 bytes being 00
180       means that the 24 bytes of variable 'a' are addressable.
181
182 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
183 The F3 byte pattern is a magic number called
184 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
185       region for this shadow byte is a RIGHT red zone intended to sit
186       at the top of the variables on the stack.
187
188 Note that the real variable layout is done in expand_used_vars in
189 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
190 stack variables as well as the different red zones, emits some
191    prologue code to populate the shadow memory so as to poison (mark as
192    non-accessible) the regions of the red zones and mark the regions of
193    stack variables as accessible, and emits some epilogue code to
194 un-poison (mark as accessible) the regions of red zones right before
195 the function exits.
196
197 [Protection of global variables]
198
199 The basic idea is to insert a red zone between two global variables
200    and install a constructor function that calls the asan runtime to
201    populate the relevant shadow memory regions at load time.
202
203    So the global variables are laid out so as to insert a red zone between
204    them. The red zones are sized so that each variable starts on a
205    32-byte boundary.
206
207 Then a constructor function is installed so that, for each global
208 variable, it calls the runtime asan library function
209    __asan_register_globals with an instance of this type:
210
211 struct __asan_global
212 {
213 // Address of the beginning of the global variable.
214 const void *__beg;
215
216 // Initial size of the global variable.
217 uptr __size;
218
219 // Size of the global variable + size of the red zone. This
220 // size is 32 bytes aligned.
221 uptr __size_with_redzone;
222
223 // Name of the global variable.
224 const void *__name;
225
226 // Name of the module where the global variable is declared.
227 const void *__module_name;
228
229 // 1 if it has dynamic initialization, 0 otherwise.
230 uptr __has_dynamic_init;
231 }
232
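   For illustration, the constructor essentially amounts to the following
   (a sketch only; the array name is made up here and the real array is
   built by the compiler, one entry per protected global):

     static struct __asan_global globals[N];
     __asan_register_globals (globals, N);
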
233 A destructor function that calls the runtime asan library function
234    __asan_unregister_globals is also installed. */
235
236 alias_set_type asan_shadow_set = -1;
237
238 /* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
239 alias set is used for all shadow memory accesses. */
240 static GTY(()) tree shadow_ptr_types[2];
241
242 /* Decl for __asan_option_detect_stack_use_after_return. */
243 static GTY(()) tree asan_detect_stack_use_after_return;
244
245 /* Hashtable support for memory references used by gimple
246 statements. */
247
248 /* This type represents a reference to a memory region. */
249 struct asan_mem_ref
250 {
251 /* The expression of the beginning of the memory region. */
252 tree start;
253
254 /* The size of the access. */
255 HOST_WIDE_INT access_size;
256 };
257
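/* Pool from which the instances of asan_mem_ref are allocated; it is
   created lazily by asan_mem_ref_get_alloc_pool below.  */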
258 static alloc_pool asan_mem_ref_alloc_pool;
259
260 /* This creates the alloc pool used to store the instances of
261 asan_mem_ref that are stored in the hash table asan_mem_ref_ht. */
262
263 static alloc_pool
264 asan_mem_ref_get_alloc_pool ()
265 {
266 if (asan_mem_ref_alloc_pool == NULL)
267 asan_mem_ref_alloc_pool = create_alloc_pool ("asan_mem_ref",
268 sizeof (asan_mem_ref),
269 10);
270 return asan_mem_ref_alloc_pool;
271
272 }
273
274 /* Initializes an instance of asan_mem_ref. */
275
276 static void
277 asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
278 {
279 ref->start = start;
280 ref->access_size = access_size;
281 }
282
283 /* Allocates memory for an instance of asan_mem_ref into the memory
284    pool returned by asan_mem_ref_get_alloc_pool and initializes it.
285 START is the address of (or the expression pointing to) the
286 beginning of memory reference. ACCESS_SIZE is the size of the
287 access to the referenced memory. */
288
289 static asan_mem_ref*
290 asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
291 {
292 asan_mem_ref *ref =
293 (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());
294
295 asan_mem_ref_init (ref, start, access_size);
296 return ref;
297 }
298
299 /* This builds and returns a pointer to the end of the memory region
300    that starts at START and has length LEN. */
301
302 tree
303 asan_mem_ref_get_end (tree start, tree len)
304 {
305 if (len == NULL_TREE || integer_zerop (len))
306 return start;
307
308 return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
309 }
310
311 /* Return a tree expression that represents the end of the referenced
312 memory region. Beware that this function can actually build a new
313 tree expression. */
314
315 tree
316 asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
317 {
318 return asan_mem_ref_get_end (ref->start, len);
319 }
320
321 struct asan_mem_ref_hasher
322 : typed_noop_remove <asan_mem_ref>
323 {
324 typedef asan_mem_ref value_type;
325 typedef asan_mem_ref compare_type;
326
327 static inline hashval_t hash (const value_type *);
328 static inline bool equal (const value_type *, const compare_type *);
329 };
330
331 /* Hash a memory reference. */
332
333 inline hashval_t
334 asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
335 {
336 hashval_t h = iterative_hash_expr (mem_ref->start, 0);
337 h = iterative_hash_host_wide_int (mem_ref->access_size, h);
338 return h;
339 }
340
341 /* Compare two memory references. We accept the length of either
342    memory reference to be NULL_TREE. */
343
344 inline bool
345 asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
346 const asan_mem_ref *m2)
347 {
348 return (m1->access_size == m2->access_size
349 && operand_equal_p (m1->start, m2->start, 0));
350 }
351
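/* Hash table holding the memory references that have already been
   instrumented; see update_mem_ref_hash_table and
   has_mem_ref_been_instrumented.  */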
352 static hash_table <asan_mem_ref_hasher> asan_mem_ref_ht;
353
354 /* Returns a reference to the hash table containing memory references.
355 This function ensures that the hash table is created. Note that
356 this hash table is updated by the function
357 update_mem_ref_hash_table. */
358
359 static hash_table <asan_mem_ref_hasher> &
360 get_mem_ref_hash_table ()
361 {
362 if (!asan_mem_ref_ht.is_created ())
363 asan_mem_ref_ht.create (10);
364
365 return asan_mem_ref_ht;
366 }
367
368 /* Clear all entries from the memory references hash table. */
369
370 static void
371 empty_mem_ref_hash_table ()
372 {
373 if (asan_mem_ref_ht.is_created ())
374 asan_mem_ref_ht.empty ();
375 }
376
377 /* Free the memory references hash table. */
378
379 static void
380 free_mem_ref_resources ()
381 {
382 if (asan_mem_ref_ht.is_created ())
383 asan_mem_ref_ht.dispose ();
384
385 if (asan_mem_ref_alloc_pool)
386 {
387 free_alloc_pool (asan_mem_ref_alloc_pool);
388 asan_mem_ref_alloc_pool = NULL;
389 }
390 }
391
392 /* Return true iff the memory reference REF has been instrumented. */
393
394 static bool
395 has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
396 {
397 asan_mem_ref r;
398 asan_mem_ref_init (&r, ref, access_size);
399
400 return (get_mem_ref_hash_table ().find (&r) != NULL);
401 }
402
403 /* Return true iff the memory reference REF has been instrumented. */
404
405 static bool
406 has_mem_ref_been_instrumented (const asan_mem_ref *ref)
407 {
408 return has_mem_ref_been_instrumented (ref->start, ref->access_size);
409 }
410
411 /* Return true iff access to memory region starting at REF and of
412 length LEN has been instrumented. */
413
414 static bool
415 has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
416 {
417 /* First let's see if the address of the beginning of REF has been
418 instrumented. */
419 if (!has_mem_ref_been_instrumented (ref))
420 return false;
421
422 if (len != 0)
423 {
424 /* Let's see if the end of the region has been instrumented. */
425 if (!has_mem_ref_been_instrumented (asan_mem_ref_get_end (ref, len),
426 ref->access_size))
427 return false;
428 }
429 return true;
430 }
431
432 /* Set REF to the memory reference present in a gimple assignment
433 ASSIGNMENT. Return true upon successful completion, false
434 otherwise. */
435
436 static bool
437 get_mem_ref_of_assignment (const gimple assignment,
438 asan_mem_ref *ref,
439 bool *ref_is_store)
440 {
441 gcc_assert (gimple_assign_single_p (assignment));
442
443 if (gimple_store_p (assignment)
444 && !gimple_clobber_p (assignment))
445 {
446 ref->start = gimple_assign_lhs (assignment);
447 *ref_is_store = true;
448 }
449 else if (gimple_assign_load_p (assignment))
450 {
451 ref->start = gimple_assign_rhs1 (assignment);
452 *ref_is_store = false;
453 }
454 else
455 return false;
456
457 ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
458 return true;
459 }
460
461 /* Return the memory references contained in a gimple statement
462 representing a builtin call that has to do with memory access. */
463
464 static bool
465 get_mem_refs_of_builtin_call (const gimple call,
466 asan_mem_ref *src0,
467 tree *src0_len,
468 bool *src0_is_store,
469 asan_mem_ref *src1,
470 tree *src1_len,
471 bool *src1_is_store,
472 asan_mem_ref *dst,
473 tree *dst_len,
474 bool *dst_is_store,
475 bool *dest_is_deref)
476 {
477 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
478
479 tree callee = gimple_call_fndecl (call);
480 tree source0 = NULL_TREE, source1 = NULL_TREE,
481 dest = NULL_TREE, len = NULL_TREE;
482 bool is_store = true, got_reference_p = false;
483 HOST_WIDE_INT access_size = 1;
484
485 switch (DECL_FUNCTION_CODE (callee))
486 {
487 /* (s, s, n) style memops. */
488 case BUILT_IN_BCMP:
489 case BUILT_IN_MEMCMP:
490 source0 = gimple_call_arg (call, 0);
491 source1 = gimple_call_arg (call, 1);
492 len = gimple_call_arg (call, 2);
493 break;
494
495 /* (src, dest, n) style memops. */
496 case BUILT_IN_BCOPY:
497 source0 = gimple_call_arg (call, 0);
498 dest = gimple_call_arg (call, 1);
499 len = gimple_call_arg (call, 2);
500 break;
501
502 /* (dest, src, n) style memops. */
503 case BUILT_IN_MEMCPY:
504 case BUILT_IN_MEMCPY_CHK:
505 case BUILT_IN_MEMMOVE:
506 case BUILT_IN_MEMMOVE_CHK:
507 case BUILT_IN_MEMPCPY:
508 case BUILT_IN_MEMPCPY_CHK:
509 dest = gimple_call_arg (call, 0);
510 source0 = gimple_call_arg (call, 1);
511 len = gimple_call_arg (call, 2);
512 break;
513
514 /* (dest, n) style memops. */
515 case BUILT_IN_BZERO:
516 dest = gimple_call_arg (call, 0);
517 len = gimple_call_arg (call, 1);
518 break;
519
520       /* (dest, x, n) style memops. */
521 case BUILT_IN_MEMSET:
522 case BUILT_IN_MEMSET_CHK:
523 dest = gimple_call_arg (call, 0);
524 len = gimple_call_arg (call, 2);
525 break;
526
527 case BUILT_IN_STRLEN:
528 source0 = gimple_call_arg (call, 0);
529 len = gimple_call_lhs (call);
530       break;
531
532 /* And now the __atomic* and __sync builtins.
533       These are handled differently from the classical memory
534 access builtins above. */
535
536 case BUILT_IN_ATOMIC_LOAD_1:
537 case BUILT_IN_ATOMIC_LOAD_2:
538 case BUILT_IN_ATOMIC_LOAD_4:
539 case BUILT_IN_ATOMIC_LOAD_8:
540 case BUILT_IN_ATOMIC_LOAD_16:
541 is_store = false;
542 /* fall through. */
543
544 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
545 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
546 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
547 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
548 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
549
550 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
551 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
552 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
553 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
554 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
555
556 case BUILT_IN_SYNC_FETCH_AND_OR_1:
557 case BUILT_IN_SYNC_FETCH_AND_OR_2:
558 case BUILT_IN_SYNC_FETCH_AND_OR_4:
559 case BUILT_IN_SYNC_FETCH_AND_OR_8:
560 case BUILT_IN_SYNC_FETCH_AND_OR_16:
561
562 case BUILT_IN_SYNC_FETCH_AND_AND_1:
563 case BUILT_IN_SYNC_FETCH_AND_AND_2:
564 case BUILT_IN_SYNC_FETCH_AND_AND_4:
565 case BUILT_IN_SYNC_FETCH_AND_AND_8:
566 case BUILT_IN_SYNC_FETCH_AND_AND_16:
567
568 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
569 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
570 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
571 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
572 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
573
574 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
575 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
576 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
577 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
578
579 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
580 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
581 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
582 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
583 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
584
585 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
586 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
587 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
588 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
589 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
590
591 case BUILT_IN_SYNC_OR_AND_FETCH_1:
592 case BUILT_IN_SYNC_OR_AND_FETCH_2:
593 case BUILT_IN_SYNC_OR_AND_FETCH_4:
594 case BUILT_IN_SYNC_OR_AND_FETCH_8:
595 case BUILT_IN_SYNC_OR_AND_FETCH_16:
596
597 case BUILT_IN_SYNC_AND_AND_FETCH_1:
598 case BUILT_IN_SYNC_AND_AND_FETCH_2:
599 case BUILT_IN_SYNC_AND_AND_FETCH_4:
600 case BUILT_IN_SYNC_AND_AND_FETCH_8:
601 case BUILT_IN_SYNC_AND_AND_FETCH_16:
602
603 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
604 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
605 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
606 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
607 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
608
609 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
610 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
611 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
612 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
613
614 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
615 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
616 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
617 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
618 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
619
620 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
621 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
622 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
623 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
624 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
625
626 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
627 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
628 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
629 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
630 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
631
632 case BUILT_IN_SYNC_LOCK_RELEASE_1:
633 case BUILT_IN_SYNC_LOCK_RELEASE_2:
634 case BUILT_IN_SYNC_LOCK_RELEASE_4:
635 case BUILT_IN_SYNC_LOCK_RELEASE_8:
636 case BUILT_IN_SYNC_LOCK_RELEASE_16:
637
638 case BUILT_IN_ATOMIC_EXCHANGE_1:
639 case BUILT_IN_ATOMIC_EXCHANGE_2:
640 case BUILT_IN_ATOMIC_EXCHANGE_4:
641 case BUILT_IN_ATOMIC_EXCHANGE_8:
642 case BUILT_IN_ATOMIC_EXCHANGE_16:
643
644 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
645 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
646 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
647 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
648 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
649
650 case BUILT_IN_ATOMIC_STORE_1:
651 case BUILT_IN_ATOMIC_STORE_2:
652 case BUILT_IN_ATOMIC_STORE_4:
653 case BUILT_IN_ATOMIC_STORE_8:
654 case BUILT_IN_ATOMIC_STORE_16:
655
656 case BUILT_IN_ATOMIC_ADD_FETCH_1:
657 case BUILT_IN_ATOMIC_ADD_FETCH_2:
658 case BUILT_IN_ATOMIC_ADD_FETCH_4:
659 case BUILT_IN_ATOMIC_ADD_FETCH_8:
660 case BUILT_IN_ATOMIC_ADD_FETCH_16:
661
662 case BUILT_IN_ATOMIC_SUB_FETCH_1:
663 case BUILT_IN_ATOMIC_SUB_FETCH_2:
664 case BUILT_IN_ATOMIC_SUB_FETCH_4:
665 case BUILT_IN_ATOMIC_SUB_FETCH_8:
666 case BUILT_IN_ATOMIC_SUB_FETCH_16:
667
668 case BUILT_IN_ATOMIC_AND_FETCH_1:
669 case BUILT_IN_ATOMIC_AND_FETCH_2:
670 case BUILT_IN_ATOMIC_AND_FETCH_4:
671 case BUILT_IN_ATOMIC_AND_FETCH_8:
672 case BUILT_IN_ATOMIC_AND_FETCH_16:
673
674 case BUILT_IN_ATOMIC_NAND_FETCH_1:
675 case BUILT_IN_ATOMIC_NAND_FETCH_2:
676 case BUILT_IN_ATOMIC_NAND_FETCH_4:
677 case BUILT_IN_ATOMIC_NAND_FETCH_8:
678 case BUILT_IN_ATOMIC_NAND_FETCH_16:
679
680 case BUILT_IN_ATOMIC_XOR_FETCH_1:
681 case BUILT_IN_ATOMIC_XOR_FETCH_2:
682 case BUILT_IN_ATOMIC_XOR_FETCH_4:
683 case BUILT_IN_ATOMIC_XOR_FETCH_8:
684 case BUILT_IN_ATOMIC_XOR_FETCH_16:
685
686 case BUILT_IN_ATOMIC_OR_FETCH_1:
687 case BUILT_IN_ATOMIC_OR_FETCH_2:
688 case BUILT_IN_ATOMIC_OR_FETCH_4:
689 case BUILT_IN_ATOMIC_OR_FETCH_8:
690 case BUILT_IN_ATOMIC_OR_FETCH_16:
691
692 case BUILT_IN_ATOMIC_FETCH_ADD_1:
693 case BUILT_IN_ATOMIC_FETCH_ADD_2:
694 case BUILT_IN_ATOMIC_FETCH_ADD_4:
695 case BUILT_IN_ATOMIC_FETCH_ADD_8:
696 case BUILT_IN_ATOMIC_FETCH_ADD_16:
697
698 case BUILT_IN_ATOMIC_FETCH_SUB_1:
699 case BUILT_IN_ATOMIC_FETCH_SUB_2:
700 case BUILT_IN_ATOMIC_FETCH_SUB_4:
701 case BUILT_IN_ATOMIC_FETCH_SUB_8:
702 case BUILT_IN_ATOMIC_FETCH_SUB_16:
703
704 case BUILT_IN_ATOMIC_FETCH_AND_1:
705 case BUILT_IN_ATOMIC_FETCH_AND_2:
706 case BUILT_IN_ATOMIC_FETCH_AND_4:
707 case BUILT_IN_ATOMIC_FETCH_AND_8:
708 case BUILT_IN_ATOMIC_FETCH_AND_16:
709
710 case BUILT_IN_ATOMIC_FETCH_NAND_1:
711 case BUILT_IN_ATOMIC_FETCH_NAND_2:
712 case BUILT_IN_ATOMIC_FETCH_NAND_4:
713 case BUILT_IN_ATOMIC_FETCH_NAND_8:
714 case BUILT_IN_ATOMIC_FETCH_NAND_16:
715
716 case BUILT_IN_ATOMIC_FETCH_XOR_1:
717 case BUILT_IN_ATOMIC_FETCH_XOR_2:
718 case BUILT_IN_ATOMIC_FETCH_XOR_4:
719 case BUILT_IN_ATOMIC_FETCH_XOR_8:
720 case BUILT_IN_ATOMIC_FETCH_XOR_16:
721
722 case BUILT_IN_ATOMIC_FETCH_OR_1:
723 case BUILT_IN_ATOMIC_FETCH_OR_2:
724 case BUILT_IN_ATOMIC_FETCH_OR_4:
725 case BUILT_IN_ATOMIC_FETCH_OR_8:
726 case BUILT_IN_ATOMIC_FETCH_OR_16:
727 {
728 dest = gimple_call_arg (call, 0);
729 /* DEST represents the address of a memory location.
730          instrument_derefs wants the memory location, so let's
731 dereference the address DEST before handing it to
732 instrument_derefs. */
733 if (TREE_CODE (dest) == ADDR_EXPR)
734 dest = TREE_OPERAND (dest, 0);
735 else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
736 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
737 dest, build_int_cst (TREE_TYPE (dest), 0));
738 else
739 gcc_unreachable ();
740
741 access_size = int_size_in_bytes (TREE_TYPE (dest));
742 }
743
744 default:
745       /* The other builtin memory accesses are not instrumented in this
746 function because they either don't have any length parameter,
747 or their length parameter is just a limit. */
748 break;
749 }
750
751 if (len != NULL_TREE)
752 {
753 if (source0 != NULL_TREE)
754 {
755 src0->start = source0;
756 src0->access_size = access_size;
757 *src0_len = len;
758 *src0_is_store = false;
759 }
760
761 if (source1 != NULL_TREE)
762 {
763 src1->start = source1;
764 src1->access_size = access_size;
765 *src1_len = len;
766 *src1_is_store = false;
767 }
768
769 if (dest != NULL_TREE)
770 {
771 dst->start = dest;
772 dst->access_size = access_size;
773 *dst_len = len;
774 *dst_is_store = true;
775 }
776
777 got_reference_p = true;
778 }
779 else if (dest)
780 {
781 dst->start = dest;
782 dst->access_size = access_size;
783 *dst_len = NULL_TREE;
784 *dst_is_store = is_store;
785 *dest_is_deref = true;
786 got_reference_p = true;
787 }
788
789 return got_reference_p;
790 }
791
792 /* Return true iff a given gimple statement has been instrumented.
793 Note that the statement is "defined" by the memory references it
794 contains. */
795
796 static bool
797 has_stmt_been_instrumented_p (gimple stmt)
798 {
799 if (gimple_assign_single_p (stmt))
800 {
801 bool r_is_store;
802 asan_mem_ref r;
803 asan_mem_ref_init (&r, NULL, 1);
804
805 if (get_mem_ref_of_assignment (stmt, &r, &r_is_store))
806 return has_mem_ref_been_instrumented (&r);
807 }
808 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
809 {
810 asan_mem_ref src0, src1, dest;
811 asan_mem_ref_init (&src0, NULL, 1);
812 asan_mem_ref_init (&src1, NULL, 1);
813 asan_mem_ref_init (&dest, NULL, 1);
814
815 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
816 bool src0_is_store = false, src1_is_store = false,
817 dest_is_store = false, dest_is_deref = false;
818 if (get_mem_refs_of_builtin_call (stmt,
819 &src0, &src0_len, &src0_is_store,
820 &src1, &src1_len, &src1_is_store,
821 &dest, &dest_len, &dest_is_store,
822 &dest_is_deref))
823 {
824 if (src0.start != NULL_TREE
825 && !has_mem_ref_been_instrumented (&src0, src0_len))
826 return false;
827
828 if (src1.start != NULL_TREE
829 && !has_mem_ref_been_instrumented (&src1, src1_len))
830 return false;
831
832 if (dest.start != NULL_TREE
833 && !has_mem_ref_been_instrumented (&dest, dest_len))
834 return false;
835
836 return true;
837 }
838 }
839 return false;
840 }
841
842 /* Insert a memory reference into the hash table. */
843
844 static void
845 update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
846 {
847 hash_table <asan_mem_ref_hasher> ht = get_mem_ref_hash_table ();
848
849 asan_mem_ref r;
850 asan_mem_ref_init (&r, ref, access_size);
851
852 asan_mem_ref **slot = ht.find_slot (&r, INSERT);
853 if (*slot == NULL)
854 *slot = asan_mem_ref_new (ref, access_size);
855 }
856
857 /* Initialize shadow_ptr_types array. */
858
859 static void
860 asan_init_shadow_ptr_types (void)
861 {
862 asan_shadow_set = new_alias_set ();
863 shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
864 TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
865 shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
866 shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
867 TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
868 shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
869 initialize_sanitizer_builtins ();
870 }
871
872 /* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */
873
874 static tree
875 asan_pp_string (pretty_printer *pp)
876 {
877 const char *buf = pp_formatted_text (pp);
878 size_t len = strlen (buf);
879 tree ret = build_string (len + 1, buf);
880 TREE_TYPE (ret)
881 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
882 build_index_type (size_int (len)));
883 TREE_READONLY (ret) = 1;
884 TREE_STATIC (ret) = 1;
885 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
886 }
887
888 /* Return a CONST_INT representing 4 consecutive shadow memory bytes. */
889
890 static rtx
891 asan_shadow_cst (unsigned char shadow_bytes[4])
892 {
893 int i;
894 unsigned HOST_WIDE_INT val = 0;
895 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
896 for (i = 0; i < 4; i++)
897 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
898 << (BITS_PER_UNIT * i);
899 return gen_int_mode (val, SImode);
900 }
901
902 /* Clear shadow memory at SHADOW_MEM, LEN bytes. We cannot emit a library
903    call here, though. */
904
905 static void
906 asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
907 {
908 rtx insn, insns, top_label, end, addr, tmp, jump;
909
910 start_sequence ();
911 clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
912 insns = get_insns ();
913 end_sequence ();
914 for (insn = insns; insn; insn = NEXT_INSN (insn))
915 if (CALL_P (insn))
916 break;
917 if (insn == NULL_RTX)
918 {
919 emit_insn (insns);
920 return;
921 }
922
923 gcc_assert ((len & 3) == 0);
924 top_label = gen_label_rtx ();
925 addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
926 shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
927 end = force_reg (Pmode, plus_constant (Pmode, addr, len));
928 emit_label (top_label);
929
930 emit_move_insn (shadow_mem, const0_rtx);
931 tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
932 true, OPTAB_LIB_WIDEN);
933 if (tmp != addr)
934 emit_move_insn (addr, tmp);
935 emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
936 jump = get_last_insn ();
937 gcc_assert (JUMP_P (jump));
938 add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
939 }
940
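/* Emit the internal "LASANPC" label at the start of the current function;
   asan_emit_stack_protection later stores its address in the stack frame
   description so the run-time library can report the function's PC.  */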
941 void
942 asan_function_start (void)
943 {
944 section *fnsec = function_section (current_function_decl);
945 switch_to_section (fnsec);
946 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
947 current_function_funcdef_no);
948 }
949
950 /* Insert code to protect stack vars. The prologue sequence should be emitted
951 directly, epilogue sequence returned. BASE is the register holding the
952    stack base, relative to which the OFFSETS array offsets are expressed. OFFSETS
953 array contains pairs of offsets in reverse order, always the end offset
954 of some gap that needs protection followed by starting offset,
955 and DECLS is an array of representative decls for each var partition.
956 LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
957    elements long (OFFSETS includes the gap before the first variable as well
958 as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
959 register which stack vars DECL_RTLs are based on. Either BASE should be
960 assigned to PBASE, when not doing use after return protection, or
961 corresponding address based on __asan_stack_malloc* return value. */
962
963 rtx
964 asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
965 HOST_WIDE_INT *offsets, tree *decls, int length)
966 {
967 rtx shadow_base, shadow_mem, ret, mem, orig_base, lab;
968 char buf[30];
969 unsigned char shadow_bytes[4];
970 HOST_WIDE_INT base_offset = offsets[length - 1];
971 HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
972 HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
973 HOST_WIDE_INT last_offset, last_size;
974 int l;
975 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
976 tree str_cst, decl, id;
977 int use_after_return_class = -1;
978
979 if (shadow_ptr_types[0] == NULL_TREE)
980 asan_init_shadow_ptr_types ();
981
982 /* First of all, prepare the description string. */
983 pretty_printer asan_pp;
984
985 pp_decimal_int (&asan_pp, length / 2 - 1);
986 pp_space (&asan_pp);
987 for (l = length - 2; l; l -= 2)
988 {
989 tree decl = decls[l / 2 - 1];
990 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
991 pp_space (&asan_pp);
992 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
993 pp_space (&asan_pp);
994 if (DECL_P (decl) && DECL_NAME (decl))
995 {
996 pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
997 pp_space (&asan_pp);
998 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
999 }
1000 else
1001 pp_string (&asan_pp, "9 <unknown>");
1002 pp_space (&asan_pp);
1003 }
1004 str_cst = asan_pp_string (&asan_pp);
1005
1006 /* Emit the prologue sequence. */
1007 if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
1008 && ASAN_USE_AFTER_RETURN)
1009 {
1010 use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
1011 /* __asan_stack_malloc_N guarantees alignment
1012 N < 6 ? (64 << N) : 4096 bytes. */
1013 if (alignb > (use_after_return_class < 6
1014 ? (64U << use_after_return_class) : 4096U))
1015 use_after_return_class = -1;
1016 else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
1017 base_align_bias = ((asan_frame_size + alignb - 1)
1018 & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
1019 }
1020 /* Align base if target is STRICT_ALIGNMENT. */
1021 if (STRICT_ALIGNMENT)
1022 base = expand_binop (Pmode, and_optab, base,
1023 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
1024 << ASAN_SHADOW_SHIFT)
1025 / BITS_PER_UNIT), Pmode), NULL_RTX,
1026 1, OPTAB_DIRECT);
1027
1028 if (use_after_return_class == -1 && pbase)
1029 emit_move_insn (pbase, base);
1030
1031 base = expand_binop (Pmode, add_optab, base,
1032 gen_int_mode (base_offset - base_align_bias, Pmode),
1033 NULL_RTX, 1, OPTAB_DIRECT);
1034 orig_base = NULL_RTX;
1035 if (use_after_return_class != -1)
1036 {
1037 if (asan_detect_stack_use_after_return == NULL_TREE)
1038 {
1039 id = get_identifier ("__asan_option_detect_stack_use_after_return");
1040 decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1041 integer_type_node);
1042 SET_DECL_ASSEMBLER_NAME (decl, id);
1043 TREE_ADDRESSABLE (decl) = 1;
1044 DECL_ARTIFICIAL (decl) = 1;
1045 DECL_IGNORED_P (decl) = 1;
1046 DECL_EXTERNAL (decl) = 1;
1047 TREE_STATIC (decl) = 1;
1048 TREE_PUBLIC (decl) = 1;
1049 TREE_USED (decl) = 1;
1050 asan_detect_stack_use_after_return = decl;
1051 }
1052 orig_base = gen_reg_rtx (Pmode);
1053 emit_move_insn (orig_base, base);
1054 ret = expand_normal (asan_detect_stack_use_after_return);
1055 lab = gen_label_rtx ();
1056 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1057 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1058 VOIDmode, 0, lab, very_likely);
1059 snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1060 use_after_return_class);
1061 ret = init_one_libfunc (buf);
1062 rtx addr = convert_memory_address (ptr_mode, base);
1063 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
1064 GEN_INT (asan_frame_size
1065 + base_align_bias),
1066 TYPE_MODE (pointer_sized_int_node),
1067 addr, ptr_mode);
1068 ret = convert_memory_address (Pmode, ret);
1069 emit_move_insn (base, ret);
1070 emit_label (lab);
1071 emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1072 gen_int_mode (base_align_bias
1073 - base_offset, Pmode),
1074 NULL_RTX, 1, OPTAB_DIRECT));
1075 }
1076 mem = gen_rtx_MEM (ptr_mode, base);
1077 mem = adjust_address (mem, VOIDmode, base_align_bias);
1078 emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
1079 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1080 emit_move_insn (mem, expand_normal (str_cst));
1081 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1082 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1083 id = get_identifier (buf);
1084 decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1085 VAR_DECL, id, char_type_node);
1086 SET_DECL_ASSEMBLER_NAME (decl, id);
1087 TREE_ADDRESSABLE (decl) = 1;
1088 TREE_READONLY (decl) = 1;
1089 DECL_ARTIFICIAL (decl) = 1;
1090 DECL_IGNORED_P (decl) = 1;
1091 TREE_STATIC (decl) = 1;
1092 TREE_PUBLIC (decl) = 0;
1093 TREE_USED (decl) = 1;
1094 DECL_INITIAL (decl) = decl;
1095 TREE_ASM_WRITTEN (decl) = 1;
1096 TREE_ASM_WRITTEN (id) = 1;
1097 emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1098 shadow_base = expand_binop (Pmode, lshr_optab, base,
1099 GEN_INT (ASAN_SHADOW_SHIFT),
1100 NULL_RTX, 1, OPTAB_DIRECT);
1101 shadow_base
1102 = plus_constant (Pmode, shadow_base,
1103 targetm.asan_shadow_offset ()
1104 + (base_align_bias >> ASAN_SHADOW_SHIFT));
1105 gcc_assert (asan_shadow_set != -1
1106 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1107 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1108 set_mem_alias_set (shadow_mem, asan_shadow_set);
1109 if (STRICT_ALIGNMENT)
1110 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1111 prev_offset = base_offset;
1112 for (l = length; l; l -= 2)
1113 {
1114 if (l == 2)
1115 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1116 offset = offsets[l - 1];
1117 if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
1118 {
1119 int i;
1120 HOST_WIDE_INT aoff
1121 = base_offset + ((offset - base_offset)
1122 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1123 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1124 (aoff - prev_offset)
1125 >> ASAN_SHADOW_SHIFT);
1126 prev_offset = aoff;
1127 for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
1128 if (aoff < offset)
1129 {
1130 if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
1131 shadow_bytes[i] = 0;
1132 else
1133 shadow_bytes[i] = offset - aoff;
1134 }
1135 else
1136 shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
1137 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1138 offset = aoff;
1139 }
1140 while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
1141 {
1142 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1143 (offset - prev_offset)
1144 >> ASAN_SHADOW_SHIFT);
1145 prev_offset = offset;
1146 memset (shadow_bytes, cur_shadow_byte, 4);
1147 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1148 offset += ASAN_RED_ZONE_SIZE;
1149 }
1150 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1151 }
1152 do_pending_stack_adjust ();
1153
1154 /* Construct epilogue sequence. */
1155 start_sequence ();
1156
1157 lab = NULL_RTX;
1158 if (use_after_return_class != -1)
1159 {
1160 rtx lab2 = gen_label_rtx ();
1161 char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
1162 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1163 emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
1164 VOIDmode, 0, lab2, very_likely);
1165 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1166 set_mem_alias_set (shadow_mem, asan_shadow_set);
1167 mem = gen_rtx_MEM (ptr_mode, base);
1168 mem = adjust_address (mem, VOIDmode, base_align_bias);
1169 emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
1170 unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
1171 if (use_after_return_class < 5
1172 && can_store_by_pieces (sz, builtin_memset_read_str, &c,
1173 BITS_PER_UNIT, true))
1174 store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
1175 BITS_PER_UNIT, true, 0);
1176 else if (use_after_return_class >= 5
1177 || !set_storage_via_setmem (shadow_mem,
1178 GEN_INT (sz),
1179 gen_int_mode (c, QImode),
1180 BITS_PER_UNIT, BITS_PER_UNIT,
1181 -1, sz, sz, sz))
1182 {
1183 snprintf (buf, sizeof buf, "__asan_stack_free_%d",
1184 use_after_return_class);
1185 ret = init_one_libfunc (buf);
1186 rtx addr = convert_memory_address (ptr_mode, base);
1187 rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
1188 emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
1189 GEN_INT (asan_frame_size + base_align_bias),
1190 TYPE_MODE (pointer_sized_int_node),
1191 orig_addr, ptr_mode);
1192 }
1193 lab = gen_label_rtx ();
1194 emit_jump (lab);
1195 emit_label (lab2);
1196 }
1197
1198 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1199 set_mem_alias_set (shadow_mem, asan_shadow_set);
1200
1201 if (STRICT_ALIGNMENT)
1202 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1203
1204 prev_offset = base_offset;
1205 last_offset = base_offset;
1206 last_size = 0;
1207 for (l = length; l; l -= 2)
1208 {
1209 offset = base_offset + ((offsets[l - 1] - base_offset)
1210 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1211 if (last_offset + last_size != offset)
1212 {
1213 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1214 (last_offset - prev_offset)
1215 >> ASAN_SHADOW_SHIFT);
1216 prev_offset = last_offset;
1217 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1218 last_offset = offset;
1219 last_size = 0;
1220 }
1221 last_size += base_offset + ((offsets[l - 2] - base_offset)
1222 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1223 - offset;
1224 }
1225 if (last_size)
1226 {
1227 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1228 (last_offset - prev_offset)
1229 >> ASAN_SHADOW_SHIFT);
1230 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1231 }
1232
1233 do_pending_stack_adjust ();
1234 if (lab)
1235 emit_label (lab);
1236
1237 ret = get_insns ();
1238 end_sequence ();
1239 return ret;
1240 }
1241
1242 /* Return true if DECL, a global var, might be overridden and therefore
1243    needs a local alias. */
1244
1245 static bool
1246 asan_needs_local_alias (tree decl)
1247 {
1248 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1249 }
1250
1251 /* Return true if DECL is a VAR_DECL that should be protected
1252 by Address Sanitizer, by appending a red zone with protected
1253 shadow memory after it and aligning it to at least
1254 ASAN_RED_ZONE_SIZE bytes. */
1255
1256 bool
1257 asan_protect_global (tree decl)
1258 {
1259 if (!ASAN_GLOBALS)
1260 return false;
1261
1262 rtx rtl, symbol;
1263
1264 if (TREE_CODE (decl) == STRING_CST)
1265 {
1266 /* Instrument all STRING_CSTs except those created
1267 by asan_pp_string here. */
1268 if (shadow_ptr_types[0] != NULL_TREE
1269 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1270 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1271 return false;
1272 return true;
1273 }
1274 if (TREE_CODE (decl) != VAR_DECL
1275 /* TLS vars aren't statically protectable. */
1276 || DECL_THREAD_LOCAL_P (decl)
1277 /* Externs will be protected elsewhere. */
1278 || DECL_EXTERNAL (decl)
1279 || !DECL_RTL_SET_P (decl)
1280 /* Comdat vars pose an ABI problem, we can't know if
1281 the var that is selected by the linker will have
1282 padding or not. */
1283 || DECL_ONE_ONLY (decl)
1284 /* Similarly for common vars. People can use -fno-common. */
1285 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
1286      /* Don't protect if using a user section; vars placed
1287         into a user section from multiple TUs are often assumed
1288         to form an array of such vars, and putting padding in there
1289         breaks this assumption. */
1290 || (DECL_SECTION_NAME (decl) != NULL_TREE
1291 && !DECL_HAS_IMPLICIT_SECTION_NAME_P (decl))
1292 || DECL_SIZE (decl) == 0
1293 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1294 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1295 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE)
1296 return false;
1297
1298 rtl = DECL_RTL (decl);
1299 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1300 return false;
1301 symbol = XEXP (rtl, 0);
1302
1303 if (CONSTANT_POOL_ADDRESS_P (symbol)
1304 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1305 return false;
1306
1307 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1308 return false;
1309
1310 #ifndef ASM_OUTPUT_DEF
1311 if (asan_needs_local_alias (decl))
1312 return false;
1313 #endif
1314
1315 return true;
1316 }
1317
1318 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1319    IS_STORE is either 1 (for a store) or 0 (for a load). If SLOW_P is true,
        or SIZE_IN_BYTES is not a power of two up to 16, the _N variant that
        takes the access size as an explicit argument is returned. */
1320
1321 static tree
1322 report_error_func (bool is_store, HOST_WIDE_INT size_in_bytes, bool slow_p)
1323 {
1324 static enum built_in_function report[2][6]
1325 = { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1326 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1327 BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1328 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1329 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1330 BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } };
1331 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1332 || size_in_bytes > 16
1333 || slow_p)
1334 return builtin_decl_implicit (report[is_store][5]);
1335 return builtin_decl_implicit (report[is_store][exact_log2 (size_in_bytes)]);
1336 }
1337
1338 /* Split the current basic block and create a condition statement
1339 insertion point right before or after the statement pointed to by
1340 ITER. Return an iterator to the point at which the caller might
1341 safely insert the condition statement.
1342
1343 THEN_BLOCK must be set to the address of an uninitialized instance
1344 of basic_block. The function will then set *THEN_BLOCK to the
1345 'then block' of the condition statement to be inserted by the
1346 caller.
1347
1348 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1349 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1350
1351    Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1352 block' of the condition statement to be inserted by the caller.
1353
1354 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1355 statements starting from *ITER, and *THEN_BLOCK is a new empty
1356 block.
1357
1358    *ITER is adjusted to always point to the first statement
1359    of the basic block *FALLTHROUGH_BLOCK. That statement is the
1360 same as what ITER was pointing to prior to calling this function,
1361 if BEFORE_P is true; otherwise, it is its following statement. */
1362
1363 gimple_stmt_iterator
1364 create_cond_insert_point (gimple_stmt_iterator *iter,
1365 bool before_p,
1366 bool then_more_likely_p,
1367 bool create_then_fallthru_edge,
1368 basic_block *then_block,
1369 basic_block *fallthrough_block)
1370 {
1371 gimple_stmt_iterator gsi = *iter;
1372
1373 if (!gsi_end_p (gsi) && before_p)
1374 gsi_prev (&gsi);
1375
1376 basic_block cur_bb = gsi_bb (*iter);
1377
1378 edge e = split_block (cur_bb, gsi_stmt (gsi));
1379
1380 /* Get a hold on the 'condition block', the 'then block' and the
1381 'else block'. */
1382 basic_block cond_bb = e->src;
1383 basic_block fallthru_bb = e->dest;
1384 basic_block then_bb = create_empty_bb (cond_bb);
1385 if (current_loops)
1386 {
1387 add_bb_to_loop (then_bb, cond_bb->loop_father);
1388 loops_state_set (LOOPS_NEED_FIXUP);
1389 }
1390
1391 /* Set up the newly created 'then block'. */
1392 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1393 int fallthrough_probability
1394 = then_more_likely_p
1395 ? PROB_VERY_UNLIKELY
1396 : PROB_ALWAYS - PROB_VERY_UNLIKELY;
1397 e->probability = PROB_ALWAYS - fallthrough_probability;
1398 if (create_then_fallthru_edge)
1399 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1400
1401 /* Set up the fallthrough basic block. */
1402 e = find_edge (cond_bb, fallthru_bb);
1403 e->flags = EDGE_FALSE_VALUE;
1404 e->count = cond_bb->count;
1405 e->probability = fallthrough_probability;
1406
1407 /* Update dominance info for the newly created then_bb; note that
1408 fallthru_bb's dominance info has already been updated by
1409      split_block. */
1410 if (dom_info_available_p (CDI_DOMINATORS))
1411 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1412
1413 *then_block = then_bb;
1414 *fallthrough_block = fallthru_bb;
1415 *iter = gsi_start_bb (fallthru_bb);
1416
1417 return gsi_last_bb (cond_bb);
1418 }
1419
1420 /* Insert an if condition followed by a 'then block' right before the
1421 statement pointed to by ITER. The fallthrough block -- which is the
1422 else block of the condition as well as the destination of the
1423    outgoing edge of the 'then block' -- starts with the statement
1424 pointed to by ITER.
1425
1426 COND is the condition of the if.
1427
1428 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1429 'then block' is higher than the probability of the edge to the
1430 fallthrough block.
1431
1432 Upon completion of the function, *THEN_BB is set to the newly
1433 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1434 fallthrough block.
1435
1436 *ITER is adjusted to still point to the same statement it was
1437 pointing to initially. */
1438
1439 static void
1440 insert_if_then_before_iter (gimple cond,
1441 gimple_stmt_iterator *iter,
1442 bool then_more_likely_p,
1443 basic_block *then_bb,
1444 basic_block *fallthrough_bb)
1445 {
1446 gimple_stmt_iterator cond_insert_point =
1447 create_cond_insert_point (iter,
1448 /*before_p=*/true,
1449 then_more_likely_p,
1450 /*create_then_fallthru_edge=*/true,
1451 then_bb,
1452 fallthrough_bb);
1453 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1454 }
1455
1456 /* Build
1457 (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset (). */
1458
1459 static tree
1460 build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
1461 tree base_addr, tree shadow_ptr_type)
1462 {
1463 tree t, uintptr_type = TREE_TYPE (base_addr);
1464 tree shadow_type = TREE_TYPE (shadow_ptr_type);
1465 gimple g;
1466
1467 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
1468 g = gimple_build_assign_with_ops (RSHIFT_EXPR,
1469 make_ssa_name (uintptr_type, NULL),
1470 base_addr, t);
1471 gimple_set_location (g, location);
1472 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1473
1474 t = build_int_cst (uintptr_type, targetm.asan_shadow_offset ());
1475 g = gimple_build_assign_with_ops (PLUS_EXPR,
1476 make_ssa_name (uintptr_type, NULL),
1477 gimple_assign_lhs (g), t);
1478 gimple_set_location (g, location);
1479 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1480
1481 g = gimple_build_assign_with_ops (NOP_EXPR,
1482 make_ssa_name (shadow_ptr_type, NULL),
1483 gimple_assign_lhs (g), NULL_TREE);
1484 gimple_set_location (g, location);
1485 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1486
1487 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1488 build_int_cst (shadow_ptr_type, 0));
1489 g = gimple_build_assign_with_ops (MEM_REF,
1490 make_ssa_name (shadow_type, NULL),
1491 t, NULL_TREE);
1492 gimple_set_location (g, location);
1493 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1494 return gimple_assign_lhs (g);
1495 }
1496
1497 /* Instrument the memory access instruction BASE. Insert new
1498 statements before or after ITER.
1499
1500 Note that the memory access represented by BASE can be either an
1501 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1502 location. IS_STORE is TRUE for a store, FALSE for a load.
1503 BEFORE_P is TRUE for inserting the instrumentation code before
1504 ITER, FALSE for inserting it after ITER.
1505
1506 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1507 statement it was pointing to prior to calling this function,
1508 otherwise, it points to the statement logically following it. */
1509
1510 static void
1511 build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
1512 bool before_p, bool is_store, HOST_WIDE_INT size_in_bytes,
1513 bool slow_p = false)
1514 {
1515 gimple_stmt_iterator gsi;
1516 basic_block then_bb, else_bb;
1517 tree t, base_addr, shadow;
1518 gimple g;
1519 tree shadow_ptr_type = shadow_ptr_types[size_in_bytes == 16 ? 1 : 0];
1520 tree shadow_type = TREE_TYPE (shadow_ptr_type);
1521 tree uintptr_type
1522 = build_nonstandard_integer_type (TYPE_PRECISION (TREE_TYPE (base)), 1);
1523 tree base_ssa = base;
1524 HOST_WIDE_INT real_size_in_bytes = size_in_bytes;
1525 tree sz_arg = NULL_TREE;
1526
1527 if (size_in_bytes == 1)
1528 slow_p = false;
1529 else if ((size_in_bytes & (size_in_bytes - 1)) != 0
1530 || size_in_bytes > 16
1531 || slow_p)
1532 {
1533 real_size_in_bytes = 1;
1534 slow_p = true;
1535 }
1536
1537 /* Get an iterator on the point where we can add the condition
1538 statement for the instrumentation. */
1539 gsi = create_cond_insert_point (iter, before_p,
1540 /*then_more_likely_p=*/false,
1541 /*create_then_fallthru_edge=*/false,
1542 &then_bb,
1543 &else_bb);
1544
1545 base = unshare_expr (base);
1546
1547 /* BASE can already be an SSA_NAME; in that case, do not create a
1548 new SSA_NAME for it. */
1549 if (TREE_CODE (base) != SSA_NAME)
1550 {
1551 g = gimple_build_assign_with_ops (TREE_CODE (base),
1552 make_ssa_name (TREE_TYPE (base), NULL),
1553 base, NULL_TREE);
1554 gimple_set_location (g, location);
1555 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1556 base_ssa = gimple_assign_lhs (g);
1557 }
1558
1559 g = gimple_build_assign_with_ops (NOP_EXPR,
1560 make_ssa_name (uintptr_type, NULL),
1561 base_ssa, NULL_TREE);
1562 gimple_set_location (g, location);
1563 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1564 base_addr = gimple_assign_lhs (g);
1565
1566 /* Build
1567 (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset (). */
1568 shadow = build_shadow_mem_access (&gsi, location, base_addr,
1569 shadow_ptr_type);
1570
1571 if (real_size_in_bytes < 8)
1572 {
1573 /* Slow path for 1, 2 and 4 byte accesses.
1574 Test (shadow != 0)
1575         & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow). */
1576 gimple_seq seq = NULL;
1577 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
1578 gimple_seq_add_stmt (&seq, shadow_test);
1579 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, base_addr, 7));
1580 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
1581 gimple_seq_last (seq)));
1582 if (real_size_in_bytes > 1)
1583 gimple_seq_add_stmt (&seq,
1584 build_assign (PLUS_EXPR, gimple_seq_last (seq),
1585 real_size_in_bytes - 1));
1586 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, gimple_seq_last (seq),
1587 shadow));
1588 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
1589 gimple_seq_last (seq)));
1590 t = gimple_assign_lhs (gimple_seq_last (seq));
1591 gimple_seq_set_location (seq, location);
1592 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
1593       /* For unusual access sizes or misaligned accesses, check the first and last bytes. */
1594 if (slow_p)
1595 {
1596 g = gimple_build_assign_with_ops (PLUS_EXPR,
1597 make_ssa_name (uintptr_type, NULL),
1598 base_addr,
1599 build_int_cst (uintptr_type,
1600 size_in_bytes - 1));
1601 gimple_set_location (g, location);
1602 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1603 tree base_end_addr = gimple_assign_lhs (g);
1604
1605 shadow = build_shadow_mem_access (&gsi, location, base_end_addr,
1606 shadow_ptr_type);
1607 seq = NULL;
1608 shadow_test = build_assign (NE_EXPR, shadow, 0);
1609 gimple_seq_add_stmt (&seq, shadow_test);
1610 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
1611 base_end_addr, 7));
1612 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
1613 gimple_seq_last (seq)));
1614 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
1615 gimple_seq_last (seq),
1616 shadow));
1617 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
1618 gimple_seq_last (seq)));
1619 gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
1620 gimple_seq_last (seq)));
1621 t = gimple_assign_lhs (gimple_seq_last (seq));
1622 gimple_seq_set_location (seq, location);
1623 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
1624 sz_arg = build_int_cst (pointer_sized_int_node, size_in_bytes);
1625 }
1626 }
1627 else
1628 t = shadow;
1629
1630 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
1631 NULL_TREE, NULL_TREE);
1632 gimple_set_location (g, location);
1633 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1634
1635 /* Generate call to the run-time library (e.g. __asan_report_load8). */
1636 gsi = gsi_start_bb (then_bb);
1637 g = gimple_build_call (report_error_func (is_store, size_in_bytes, slow_p),
1638 sz_arg ? 2 : 1, base_addr, sz_arg);
1639 gimple_set_location (g, location);
1640 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1641
1642 *iter = gsi_start_bb (else_bb);
1643 }
1644
1645 /* If T represents a memory access, add instrumentation code before ITER.
1646 LOCATION is source code location.
1647 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
1648
1649 static void
1650 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1651 location_t location, bool is_store)
1652 {
1653 if (is_store && !ASAN_INSTRUMENT_WRITES)
1654 return;
1655 if (!is_store && !ASAN_INSTRUMENT_READS)
1656 return;
1657
1658 tree type, base;
1659 HOST_WIDE_INT size_in_bytes;
1660
1661 type = TREE_TYPE (t);
1662 switch (TREE_CODE (t))
1663 {
1664 case ARRAY_REF:
1665 case COMPONENT_REF:
1666 case INDIRECT_REF:
1667 case MEM_REF:
1668 case VAR_DECL:
1669 break;
1671 default:
1672 return;
1673 }
1674
1675 size_in_bytes = int_size_in_bytes (type);
1676 if (size_in_bytes <= 0)
1677 return;
1678
1679 HOST_WIDE_INT bitsize, bitpos;
1680 tree offset;
1681 enum machine_mode mode;
1682 int volatilep = 0, unsignedp = 0;
1683 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
1684 &mode, &unsignedp, &volatilep, false);
1685 if (((size_in_bytes & (size_in_bytes - 1)) == 0
1686 && (bitpos % (size_in_bytes * BITS_PER_UNIT)))
1687 || bitsize != size_in_bytes * BITS_PER_UNIT)
1688 {
1689 if (TREE_CODE (t) == COMPONENT_REF
1690 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1691 {
1692 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1693 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1694 TREE_OPERAND (t, 0), repr,
1695 NULL_TREE), location, is_store);
1696 }
1697 return;
1698 }
1699 if (bitpos % BITS_PER_UNIT)
1700 return;
1701
1702 if (TREE_CODE (inner) == VAR_DECL
1703 && offset == NULL_TREE
1704 && bitpos >= 0
1705 && DECL_SIZE (inner)
1706 && tree_fits_shwi_p (DECL_SIZE (inner))
1707 && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
1708 {
1709 if (DECL_THREAD_LOCAL_P (inner))
1710 return;
1711 if (!TREE_STATIC (inner))
1712 {
1713 /* Automatic vars in the current function will always be
1714 accessible. */
1715 if (decl_function_context (inner) == current_function_decl)
1716 return;
1717 }
1718 /* Always instrument external vars; they might be dynamically
1719 initialized. */
1720 else if (!DECL_EXTERNAL (inner))
1721 {
1722 /* For static vars, if they are known not to be dynamically
1723 initialized, they will always be accessible. */
1724 varpool_node *vnode = varpool_get_node (inner);
1725 if (vnode && !vnode->dynamically_initialized)
1726 return;
1727 }
1728 }
1729
1730 base = build_fold_addr_expr (t);
1731 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1732 {
1733 bool slow_p = false;
1734 if (size_in_bytes > 1)
1735 {
1736 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1737 || size_in_bytes > 16)
1738 slow_p = true;
1739 else
1740 {
1741 unsigned int align = get_object_alignment (t);
1742 if (align < size_in_bytes * BITS_PER_UNIT)
1743 {
1744 /* On non-strict-alignment targets, if a
1745 16-byte access is only 8-byte aligned,
1746 this results in a misaligned 2-byte
1747 shadow memory load, but it can still
1748 be handled with a single read. */
1749 if (size_in_bytes != 16
1750 || STRICT_ALIGNMENT
1751 || align < 8 * BITS_PER_UNIT)
1752 slow_p = true;
1753 }
1754 }
1755 }
1756 build_check_stmt (location, base, iter, /*before_p=*/true,
1757 is_store, size_in_bytes, slow_p);
1758 update_mem_ref_hash_table (base, size_in_bytes);
1759 update_mem_ref_hash_table (t, size_in_bytes);
1760 }
1761
1762 }
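
/* Editorial note on how SLOW_P is chosen above; a few examples that follow
   directly from the conditions in the code (assuming a typical
   non-strict-alignment target):

     - 4-byte load from a naturally aligned int       -> fast path
     - 3-byte access (size not a power of two)        -> slow path
     - 32-byte access (size larger than 16)           -> slow path
     - 16-byte access that is only 8-byte aligned     -> fast path, at the
       cost of a possibly misaligned 2-byte shadow load
     - the same 16-byte access with STRICT_ALIGNMENT  -> slow path  */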
1763
1764 /* Instrument an access to a contiguous memory region that starts at
1765 the address pointed to by BASE, over a length of LEN (expressed in
1766 units of sizeof (*BASE) bytes). ITER points to the instruction before
1767 which the instrumentation instructions must be inserted. LOCATION
1768 is the source location that the instrumentation instructions must
1769 have. If IS_STORE is true, then the memory access is a store;
1770 otherwise, it's a load. */
1771
1772 static void
1773 instrument_mem_region_access (tree base, tree len,
1774 gimple_stmt_iterator *iter,
1775 location_t location, bool is_store)
1776 {
1777 if (!POINTER_TYPE_P (TREE_TYPE (base))
1778 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1779 || integer_zerop (len))
1780 return;
1781
1782 gimple_stmt_iterator gsi = *iter;
1783
1784 basic_block fallthrough_bb = NULL, then_bb = NULL;
1785
1786 /* If the beginning of the memory region has already been
1787 instrumented, do not instrument it. */
1788 bool start_instrumented = has_mem_ref_been_instrumented (base, 1);
1789
1790 /* If the end of the memory region has already been instrumented, do
1791 not instrument it. */
1792 tree end = asan_mem_ref_get_end (base, len);
1793 bool end_instrumented = has_mem_ref_been_instrumented (end, 1);
1794
1795 if (start_instrumented && end_instrumented)
1796 return;
1797
1798 if (!is_gimple_constant (len))
1799 {
1800 /* So, the length of the memory area to asan-protect is
1801 non-constant. Let's guard the generated instrumentation code
1802 like:
1803
1804 if (len != 0)
1805 {
1806 //asan instrumentation code goes here.
1807 }
1808 // fallthrough instructions, starting with *ITER. */
1809
1810 gimple g = gimple_build_cond (NE_EXPR,
1811 len,
1812 build_int_cst (TREE_TYPE (len), 0),
1813 NULL_TREE, NULL_TREE);
1814 gimple_set_location (g, location);
1815 insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
1816 &then_bb, &fallthrough_bb);
1817 /* Note that fallthrough_bb starts with the statement that was
1818 pointed to by ITER. */
1819
1820 /* The 'then block' of the 'if (len != 0)' condition is where
1821 we'll generate the asan instrumentation code now. */
1822 gsi = gsi_last_bb (then_bb);
1823 }
1824
1825 if (!start_instrumented)
1826 {
1827 /* Instrument the beginning of the memory region to be accessed,
1828 and arrange for the rest of the instrumentation code to be
1829 inserted in the then block *after* the current gsi. */
1830 build_check_stmt (location, base, &gsi, /*before_p=*/true, is_store, 1);
1831
1832 if (then_bb)
1833 /* We are in the case where the length of the region is not
1834 constant; so instrumentation code is being generated in the
1835 'then block' of the 'if (len != 0)' condition. Let's arrange
1836 for the subsequent instrumentation statements to go in the
1837 'then block'. */
1838 gsi = gsi_last_bb (then_bb);
1839 else
1840 {
1841 *iter = gsi;
1842 /* Only remember this access as instrumented here, where the length
1843 is constant. If it were unknown, it might be zero and the guarded
1844 check would never run, so we couldn't rely on it being instrumented. */
1845 update_mem_ref_hash_table (base, 1);
1846 }
1847 }
1848
1849 if (end_instrumented)
1850 return;
1851
1852 /* We want to instrument the access at the end of the memory region,
1853 which is at (base + len - 1). */
1854
1855 /* offset = len - 1; */
1856 len = unshare_expr (len);
1857 tree offset;
1858 gimple_seq seq = NULL;
1859 if (TREE_CODE (len) == INTEGER_CST)
1860 offset = fold_build2 (MINUS_EXPR, size_type_node,
1861 fold_convert (size_type_node, len),
1862 build_int_cst (size_type_node, 1));
1863 else
1864 {
1865 gimple g;
1866 tree t;
1867
1868 if (TREE_CODE (len) != SSA_NAME)
1869 {
1870 t = make_ssa_name (TREE_TYPE (len), NULL);
1871 g = gimple_build_assign_with_ops (TREE_CODE (len), t, len, NULL);
1872 gimple_set_location (g, location);
1873 gimple_seq_add_stmt_without_update (&seq, g);
1874 len = t;
1875 }
1876 if (!useless_type_conversion_p (size_type_node, TREE_TYPE (len)))
1877 {
1878 t = make_ssa_name (size_type_node, NULL);
1879 g = gimple_build_assign_with_ops (NOP_EXPR, t, len, NULL);
1880 gimple_set_location (g, location);
1881 gimple_seq_add_stmt_without_update (&seq, g);
1882 len = t;
1883 }
1884
1885 t = make_ssa_name (size_type_node, NULL);
1886 g = gimple_build_assign_with_ops (MINUS_EXPR, t, len,
1887 build_int_cst (size_type_node, 1));
1888 gimple_set_location (g, location);
1889 gimple_seq_add_stmt_without_update (&seq, g);
1890 offset = gimple_assign_lhs (g);
1891 }
1892
1893 /* _1 = base; */
1894 base = unshare_expr (base);
1895 gimple region_end =
1896 gimple_build_assign_with_ops (TREE_CODE (base),
1897 make_ssa_name (TREE_TYPE (base), NULL),
1898 base, NULL);
1899 gimple_set_location (region_end, location);
1900 gimple_seq_add_stmt_without_update (&seq, region_end);
1901
1902 /* _2 = _1 + offset; */
1903 region_end =
1904 gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
1905 make_ssa_name (TREE_TYPE (base), NULL),
1906 gimple_assign_lhs (region_end),
1907 offset);
1908 gimple_set_location (region_end, location);
1909 gimple_seq_add_stmt_without_update (&seq, region_end);
1910 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
1911
1912 /* instrument access at _2; */
1913 gsi = gsi_for_stmt (region_end);
1914 build_check_stmt (location, gimple_assign_lhs (region_end),
1915 &gsi, /*before_p=*/false, is_store, 1);
1916
1917 if (then_bb == NULL)
1918 update_mem_ref_hash_table (end, 1);
1919
1920 *iter = gsi_for_stmt (gsi_stmt (*iter));
1921 }
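
/* Editorial illustration of the region instrumentation above.  For a call
   such as

       memset (p, 0, n);

   with a non-constant N, the generated checks are roughly equivalent to

       if (n != 0)
         {
           <check 1-byte store at p[0]>;
           <check 1-byte store at p[n - 1]>;
         }
       memset (p, 0, n);

   where each <check ...> is the shadow-memory test emitted by
   build_check_stmt.  With a constant non-zero N the guard is omitted.  */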
1922
1923 /* Instrument the call (to the builtin strlen function) pointed to by
1924 ITER.
1925
1926 This function instruments the access to the first byte of the
1927 argument, right before the call. After the call it instruments the
1928 access to the last byte of the argument; it uses the result of the
1929 call to deduce the offset of that last byte.
1930
1931 Upon completion, iff the call has actually been instrumented, this
1932 function returns TRUE and *ITER points to the statement logically
1933 following the built-in strlen function call *ITER was initially
1934 pointing to. Otherwise, the function returns FALSE and *ITER
1935 remains unchanged. */
1936
1937 static bool
1938 instrument_strlen_call (gimple_stmt_iterator *iter)
1939 {
1940 gimple call = gsi_stmt (*iter);
1941 gcc_assert (is_gimple_call (call));
1942
1943 tree callee = gimple_call_fndecl (call);
1944 gcc_assert (is_builtin_fn (callee)
1945 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
1946 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN);
1947
1948 tree len = gimple_call_lhs (call);
1949 if (len == NULL)
1950 /* Some passes might clear the return value of the strlen call;
1951 bail out in that case. Return FALSE as we are not advancing
1952 *ITER. */
1953 return false;
1954 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (len)));
1955
1956 location_t loc = gimple_location (call);
1957 tree str_arg = gimple_call_arg (call, 0);
1958
1959 /* Instrument the access to the first byte of str_arg, i.e.:
1960
1961 _1 = str_arg; instrument (_1); */
1962 tree cptr_type = build_pointer_type (char_type_node);
1963 gimple str_arg_ssa =
1964 gimple_build_assign_with_ops (NOP_EXPR,
1965 make_ssa_name (cptr_type, NULL),
1966 str_arg, NULL);
1967 gimple_set_location (str_arg_ssa, loc);
1968 gimple_stmt_iterator gsi = *iter;
1969 gsi_insert_before (&gsi, str_arg_ssa, GSI_NEW_STMT);
1970 build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa), &gsi,
1971 /*before_p=*/false, /*is_store=*/false, 1);
1972
1973 /* If we initially had an instruction like:
1974
1975 int n = strlen (str)
1976
1977 we now want to instrument the access to str[n], after the
1978 instruction above. */
1979
1980 /* So let's build the access to str[n] that is, access through the
1981 pointer_plus expr: (_1 + len). */
1982 gimple stmt =
1983 gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
1984 make_ssa_name (cptr_type, NULL),
1985 gimple_assign_lhs (str_arg_ssa),
1986 len);
1987 gimple_set_location (stmt, loc);
1988 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
1989
1990 build_check_stmt (loc, gimple_assign_lhs (stmt), &gsi,
1991 /*before_p=*/false, /*is_store=*/false, 1);
1992
1993 /* Ensure that iter points to the statement logically following the
1994 one it was initially pointing to. */
1995 *iter = gsi;
1996 /* As *ITER has been advanced to point to the next statement, let's
1997 return true to inform transform_statements that it shouldn't
1998 advance *ITER anymore; otherwise it would skip that next
1999 statement, which wouldn't be instrumented. */
2000 return true;
2001 }
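
/* Editorial illustration of the strlen instrumentation above.  For

       n = strlen (str);

   the statements emitted are roughly

       _1 = (char *) str;
       <check 1-byte load at _1[0]>;    // before the call
       n = strlen (str);
       _2 = _1 + n;
       <check 1-byte load at *_2>;      // the terminating NUL, after the call

   with each <check ...> again expanding to the test built by
   build_check_stmt.  */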
2002
2003 /* Instrument the call to a built-in memory access function that is
2004 pointed to by the iterator ITER.
2005
2006 Upon completion, return TRUE iff *ITER has been advanced to the
2007 statement following the one it was originally pointing to. */
2008
2009 static bool
2010 instrument_builtin_call (gimple_stmt_iterator *iter)
2011 {
2012 if (!ASAN_MEMINTRIN)
2013 return false;
2014
2015 bool iter_advanced_p = false;
2016 gimple call = gsi_stmt (*iter);
2017
2018 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
2019
2020 tree callee = gimple_call_fndecl (call);
2021 location_t loc = gimple_location (call);
2022
2023 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN)
2024 iter_advanced_p = instrument_strlen_call (iter);
2025 else
2026 {
2027 asan_mem_ref src0, src1, dest;
2028 asan_mem_ref_init (&src0, NULL, 1);
2029 asan_mem_ref_init (&src1, NULL, 1);
2030 asan_mem_ref_init (&dest, NULL, 1);
2031
2032 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
2033 bool src0_is_store = false, src1_is_store = false,
2034 dest_is_store = false, dest_is_deref = false;
2035
2036 if (get_mem_refs_of_builtin_call (call,
2037 &src0, &src0_len, &src0_is_store,
2038 &src1, &src1_len, &src1_is_store,
2039 &dest, &dest_len, &dest_is_store,
2040 &dest_is_deref))
2041 {
2042 if (dest_is_deref)
2043 {
2044 instrument_derefs (iter, dest.start, loc, dest_is_store);
2045 gsi_next (iter);
2046 iter_advanced_p = true;
2047 }
2048 else if (src0_len || src1_len || dest_len)
2049 {
2050 if (src0.start != NULL_TREE)
2051 instrument_mem_region_access (src0.start, src0_len,
2052 iter, loc, /*is_store=*/false);
2053 if (src1.start != NULL_TREE)
2054 instrument_mem_region_access (src1.start, src1_len,
2055 iter, loc, /*is_store=*/false);
2056 if (dest.start != NULL_TREE)
2057 instrument_mem_region_access (dest.start, dest_len,
2058 iter, loc, /*is_store=*/true);
2059 *iter = gsi_for_stmt (call);
2060 gsi_next (iter);
2061 iter_advanced_p = true;
2062 }
2063 }
2064 }
2065 return iter_advanced_p;
2066 }
2067
2068 /* Instrument the assignment statement pointed to by ITER if it is
2069 subject to instrumentation. Return TRUE iff instrumentation actually
2070 happened. In that case, the iterator ITER is advanced to the next
2071 logical expression following the one initially pointed to by ITER,
2072 and the relevant memory reference whose access has been
2073 instrumented is added to the memory references hash table. */
2074
2075 static bool
2076 maybe_instrument_assignment (gimple_stmt_iterator *iter)
2077 {
2078 gimple s = gsi_stmt (*iter);
2079
2080 gcc_assert (gimple_assign_single_p (s));
2081
2082 tree ref_expr = NULL_TREE;
2083 bool is_store, is_instrumented = false;
2084
2085 if (gimple_store_p (s))
2086 {
2087 ref_expr = gimple_assign_lhs (s);
2088 is_store = true;
2089 instrument_derefs (iter, ref_expr,
2090 gimple_location (s),
2091 is_store);
2092 is_instrumented = true;
2093 }
2094
2095 if (gimple_assign_load_p (s))
2096 {
2097 ref_expr = gimple_assign_rhs1 (s);
2098 is_store = false;
2099 instrument_derefs (iter, ref_expr,
2100 gimple_location (s),
2101 is_store);
2102 is_instrumented = true;
2103 }
2104
2105 if (is_instrumented)
2106 gsi_next (iter);
2107
2108 return is_instrumented;
2109 }
2110
2111 /* Instrument the function call pointed to by the iterator ITER, if it
2112 is subject to instrumentation. At the moment, the only function
2113 calls that are instrumented are some built-in functions that access
2114 memory. Look at instrument_builtin_call to learn more.
2115
2116 Upon completion return TRUE iff *ITER was advanced to the statement
2117 following the one it was originally pointing to. */
2118
2119 static bool
2120 maybe_instrument_call (gimple_stmt_iterator *iter)
2121 {
2122 gimple stmt = gsi_stmt (*iter);
2123 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2124
2125 if (is_builtin && instrument_builtin_call (iter))
2126 return true;
2127
2128 if (gimple_call_noreturn_p (stmt))
2129 {
2130 if (is_builtin)
2131 {
2132 tree callee = gimple_call_fndecl (stmt);
2133 switch (DECL_FUNCTION_CODE (callee))
2134 {
2135 case BUILT_IN_UNREACHABLE:
2136 case BUILT_IN_TRAP:
2137 /* Don't instrument these. */
2138 return false;
default:
break;
2139 }
2140 }
2141 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2142 gimple g = gimple_build_call (decl, 0);
2143 gimple_set_location (g, gimple_location (stmt));
2144 gsi_insert_before (iter, g, GSI_SAME_STMT);
2145 }
2146 return false;
2147 }
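
/* Editorial illustration of the noreturn handling above.  A call such as

       abort ();

   becomes

       __asan_handle_no_return ();
       abort ();

   so the run-time library gets a chance to clean up (unpoison) the stack
   before the frame is abandoned.  BUILT_IN_UNREACHABLE and BUILT_IN_TRAP
   are deliberately left alone.  */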
2148
2149 /* Walk each instruction of all basic blocks and instrument those that
2150 represent memory references: loads, stores, or function calls.
2151 In a given basic block, this function avoids instrumenting memory
2152 references that have already been instrumented. */
2153
2154 static void
2155 transform_statements (void)
2156 {
2157 basic_block bb, last_bb = NULL;
2158 gimple_stmt_iterator i;
2159 int saved_last_basic_block = last_basic_block_for_fn (cfun);
2160
2161 FOR_EACH_BB_FN (bb, cfun)
2162 {
2163 basic_block prev_bb = bb;
2164
2165 if (bb->index >= saved_last_basic_block) continue;
2166
2167 /* Flush the mem ref hash table, if the current bb doesn't have
2168 exactly one predecessor, or if that predecessor (skipping
2169 over asan-created basic blocks) isn't the last processed
2170 basic block. Thus we effectively flush on extended basic
2171 block boundaries. */
2172 while (single_pred_p (prev_bb))
2173 {
2174 prev_bb = single_pred (prev_bb);
2175 if (prev_bb->index < saved_last_basic_block)
2176 break;
2177 }
2178 if (prev_bb != last_bb)
2179 empty_mem_ref_hash_table ();
2180 last_bb = bb;
2181
2182 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2183 {
2184 gimple s = gsi_stmt (i);
2185
2186 if (has_stmt_been_instrumented_p (s))
2187 gsi_next (&i);
2188 else if (gimple_assign_single_p (s)
2189 && maybe_instrument_assignment (&i))
2190 /* Nothing to do as maybe_instrument_assignment advanced
2191 the iterator I. */;
2192 else if (is_gimple_call (s) && maybe_instrument_call (&i))
2193 /* Nothing to do as maybe_instrument_call
2194 advanced the iterator I. */;
2195 else
2196 {
2197 /* No instrumentation happened.
2198
2199 If the current instruction is a function call that
2200 might free something, let's forget about the memory
2201 references that got instrumented. Otherwise we might
2202 miss some instrumentation opportunities. */
2203 if (is_gimple_call (s) && !nonfreeing_call_p (s))
2204 empty_mem_ref_hash_table ();
2205
2206 gsi_next (&i);
2207 }
2208 }
2209 }
2210 free_mem_ref_resources ();
2211 }
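
/* Editorial illustration of the flushing behaviour above.  Within one
   extended basic block the hash table lets redundant checks be skipped:

       *p = 1;     // instrumented; the reference is recorded
       x = *p;     // already in the hash table, not instrumented again
       free (q);   // not a nonfreeing call: the table is flushed
       x = *p;     // instrumented again

   Entering a block that does not have the previously processed block as
   its (possibly asan-created) single predecessor flushes the table as
   well.  */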
2212
2213 /* Build
2214 __asan_before_dynamic_init (module_name)
2215 or
2216 __asan_after_dynamic_init ()
2217 call. */
2218
2219 tree
2220 asan_dynamic_init_call (bool after_p)
2221 {
2222 tree fn = builtin_decl_implicit (after_p
2223 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2224 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2225 tree module_name_cst = NULL_TREE;
2226 if (!after_p)
2227 {
2228 pretty_printer module_name_pp;
2229 pp_string (&module_name_pp, main_input_filename);
2230
2231 if (shadow_ptr_types[0] == NULL_TREE)
2232 asan_init_shadow_ptr_types ();
2233 module_name_cst = asan_pp_string (&module_name_pp);
2234 module_name_cst = fold_convert (const_ptr_type_node,
2235 module_name_cst);
2236 }
2237
2238 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2239 }
2240
2241 /* Build
2242 struct __asan_global
2243 {
2244 const void *__beg;
2245 uptr __size;
2246 uptr __size_with_redzone;
2247 const void *__name;
2248 const void *__module_name;
2249 uptr __has_dynamic_init;
2250 } type. */
2251
2252 static tree
2253 asan_global_struct (void)
2254 {
2255 static const char *field_names[6]
2256 = { "__beg", "__size", "__size_with_redzone",
2257 "__name", "__module_name", "__has_dynamic_init" };
2258 tree fields[6], ret;
2259 int i;
2260
2261 ret = make_node (RECORD_TYPE);
2262 for (i = 0; i < 6; i++)
2263 {
2264 fields[i]
2265 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2266 get_identifier (field_names[i]),
2267 (i == 0 || i == 3) ? const_ptr_type_node
2268 : pointer_sized_int_node);
2269 DECL_CONTEXT (fields[i]) = ret;
2270 if (i)
2271 DECL_CHAIN (fields[i - 1]) = fields[i];
2272 }
2273 TYPE_FIELDS (ret) = fields[0];
2274 TYPE_NAME (ret) = get_identifier ("__asan_global");
2275 layout_type (ret);
2276 return ret;
2277 }
2278
2279 /* Append description of a single global DECL into vector V.
2280 TYPE is the __asan_global struct type as returned by asan_global_struct. */
2281
2282 static void
2283 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2284 {
2285 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2286 unsigned HOST_WIDE_INT size;
2287 tree str_cst, module_name_cst, refdecl = decl;
2288 vec<constructor_elt, va_gc> *vinner = NULL;
2289
2290 pretty_printer asan_pp, module_name_pp;
2291
2292 if (DECL_NAME (decl))
2293 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2294 else
2295 pp_string (&asan_pp, "<unknown>");
2296 str_cst = asan_pp_string (&asan_pp);
2297
2298 pp_string (&module_name_pp, main_input_filename);
2299 module_name_cst = asan_pp_string (&module_name_pp);
2300
2301 if (asan_needs_local_alias (decl))
2302 {
2303 char buf[20];
2304 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2305 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2306 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2307 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2308 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2309 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2310 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2311 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2312 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2313 TREE_STATIC (refdecl) = 1;
2314 TREE_PUBLIC (refdecl) = 0;
2315 TREE_USED (refdecl) = 1;
2316 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2317 }
2318
2319 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2320 fold_convert (const_ptr_type_node,
2321 build_fold_addr_expr (refdecl)));
2322 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2323 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2324 size += asan_red_zone_size (size);
2325 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2326 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2327 fold_convert (const_ptr_type_node, str_cst));
2328 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2329 fold_convert (const_ptr_type_node, module_name_cst));
2330 varpool_node *vnode = varpool_get_node (decl);
2331 int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2332 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2333 build_int_cst (uptr, has_dynamic_init));
2334 init = build_constructor (type, vinner);
2335 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2336 }
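
/* Editorial illustration of the descriptor built above.  For a global

       int some_global;     // hypothetical, defined in foo.c

   with no dynamic initializer, the appended __asan_global entry reads
   roughly

       { &some_global, 4, 4 + <redzone>, "some_global", "foo.c", 0 }

   where <redzone> is whatever asan_red_zone_size returns for a 4-byte
   object, and the first field may instead point to a local .LASAN* alias
   when asan_needs_local_alias holds.  */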
2337
2338 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2339 void
2340 initialize_sanitizer_builtins (void)
2341 {
2342 tree decl;
2343
2344 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2345 return;
2346
2347 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2348 tree BT_FN_VOID_PTR
2349 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2350 tree BT_FN_VOID_CONST_PTR
2351 = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2352 tree BT_FN_VOID_PTR_PTR
2353 = build_function_type_list (void_type_node, ptr_type_node,
2354 ptr_type_node, NULL_TREE);
2355 tree BT_FN_VOID_PTR_PTR_PTR
2356 = build_function_type_list (void_type_node, ptr_type_node,
2357 ptr_type_node, ptr_type_node, NULL_TREE);
2358 tree BT_FN_VOID_PTR_PTRMODE
2359 = build_function_type_list (void_type_node, ptr_type_node,
2360 pointer_sized_int_node, NULL_TREE);
2361 tree BT_FN_VOID_INT
2362 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2363 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2364 tree BT_FN_IX_CONST_VPTR_INT[5];
2365 tree BT_FN_IX_VPTR_IX_INT[5];
2366 tree BT_FN_VOID_VPTR_IX_INT[5];
2367 tree vptr
2368 = build_pointer_type (build_qualified_type (void_type_node,
2369 TYPE_QUAL_VOLATILE));
2370 tree cvptr
2371 = build_pointer_type (build_qualified_type (void_type_node,
2372 TYPE_QUAL_VOLATILE
2373 |TYPE_QUAL_CONST));
2374 tree boolt
2375 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2376 int i;
2377 for (i = 0; i < 5; i++)
2378 {
2379 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2380 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2381 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2382 integer_type_node, integer_type_node,
2383 NULL_TREE);
2384 BT_FN_IX_CONST_VPTR_INT[i]
2385 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2386 BT_FN_IX_VPTR_IX_INT[i]
2387 = build_function_type_list (ix, vptr, ix, integer_type_node,
2388 NULL_TREE);
2389 BT_FN_VOID_VPTR_IX_INT[i]
2390 = build_function_type_list (void_type_node, vptr, ix,
2391 integer_type_node, NULL_TREE);
2392 }
2393 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2394 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2395 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2396 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2397 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2398 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2399 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2400 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2401 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2402 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2403 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2404 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2405 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2406 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2407 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2408 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2409 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2410 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2411 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2412 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2413 #undef ATTR_NOTHROW_LEAF_LIST
2414 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2415 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2416 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2417 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2418 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2419 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2420 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2421 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2422 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2423 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2424 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2425 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2426 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2427 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2428 #undef DEF_SANITIZER_BUILTIN
2429 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2430 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2431 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2432 set_call_expr_flags (decl, ATTRS); \
2433 set_builtin_decl (ENUM, decl, true);
2434
2435 #include "sanitizer.def"
2436
2437 #undef DEF_SANITIZER_BUILTIN
2438 }
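
/* Editorial note: each DEF_SANITIZER_BUILTIN entry pulled in from
   sanitizer.def above therefore expands to roughly

       decl = add_builtin_function ("__builtin___asan_report_load1",
				    BT_FN_VOID_PTR, BUILT_IN_ASAN_REPORT_LOAD1,
				    BUILT_IN_NORMAL, "__asan_report_load1",
				    NULL_TREE);
       set_call_expr_flags (decl, ATTRS);
       set_builtin_decl (BUILT_IN_ASAN_REPORT_LOAD1, decl, true);

   (the enum, name and type shown are just one plausible entry quoted for
   illustration; the authoritative list lives in sanitizer.def).  */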
2439
2440 /* Called via htab_traverse. Count number of emitted
2441 STRING_CSTs in the constant hash table. */
2442
2443 static int
2444 count_string_csts (void **slot, void *data)
2445 {
2446 struct constant_descriptor_tree *desc
2447 = (struct constant_descriptor_tree *) *slot;
2448 if (TREE_CODE (desc->value) == STRING_CST
2449 && TREE_ASM_WRITTEN (desc->value)
2450 && asan_protect_global (desc->value))
2451 ++*((unsigned HOST_WIDE_INT *) data);
2452 return 1;
2453 }
2454
2455 /* Helper structure to pass two parameters to
2456 add_string_csts. */
2457
2458 struct asan_add_string_csts_data
2459 {
2460 tree type;
2461 vec<constructor_elt, va_gc> *v;
2462 };
2463
2464 /* Called via htab_traverse. Call asan_add_global
2465 on emitted STRING_CSTs from the constant hash table. */
2466
2467 static int
2468 add_string_csts (void **slot, void *data)
2469 {
2470 struct constant_descriptor_tree *desc
2471 = (struct constant_descriptor_tree *) *slot;
2472 if (TREE_CODE (desc->value) == STRING_CST
2473 && TREE_ASM_WRITTEN (desc->value)
2474 && asan_protect_global (desc->value))
2475 {
2476 struct asan_add_string_csts_data *aascd
2477 = (struct asan_add_string_csts_data *) data;
2478 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2479 aascd->type, aascd->v);
2480 }
2481 return 1;
2482 }
2483
2484 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2485 invoke ggc_collect. */
2486 static GTY(()) tree asan_ctor_statements;
2487
2488 /* Module-level instrumentation.
2489 - Insert __asan_init_vN() into the list of CTORs.
2490 - TODO: insert redzones around globals.
2491 */
2492
2493 void
2494 asan_finish_file (void)
2495 {
2496 varpool_node *vnode;
2497 unsigned HOST_WIDE_INT gcount = 0;
2498
2499 if (shadow_ptr_types[0] == NULL_TREE)
2500 asan_init_shadow_ptr_types ();
2501 /* Avoid instrumenting code in the asan ctors/dtors.
2502 We don't need to insert padding after the description strings,
2503 nor after .LASAN* array. */
2504 flag_sanitize &= ~SANITIZE_ADDRESS;
2505
2506 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2507 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2508 FOR_EACH_DEFINED_VARIABLE (vnode)
2509 if (TREE_ASM_WRITTEN (vnode->decl)
2510 && asan_protect_global (vnode->decl))
2511 ++gcount;
2512 htab_t const_desc_htab = constant_pool_htab ();
2513 htab_traverse (const_desc_htab, count_string_csts, &gcount);
2514 if (gcount)
2515 {
2516 tree type = asan_global_struct (), var, ctor;
2517 tree dtor_statements = NULL_TREE;
2518 vec<constructor_elt, va_gc> *v;
2519 char buf[20];
2520
2521 type = build_array_type_nelts (type, gcount);
2522 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2523 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2524 type);
2525 TREE_STATIC (var) = 1;
2526 TREE_PUBLIC (var) = 0;
2527 DECL_ARTIFICIAL (var) = 1;
2528 DECL_IGNORED_P (var) = 1;
2529 vec_alloc (v, gcount);
2530 FOR_EACH_DEFINED_VARIABLE (vnode)
2531 if (TREE_ASM_WRITTEN (vnode->decl)
2532 && asan_protect_global (vnode->decl))
2533 asan_add_global (vnode->decl, TREE_TYPE (type), v);
2534 struct asan_add_string_csts_data aascd;
2535 aascd.type = TREE_TYPE (type);
2536 aascd.v = v;
2537 htab_traverse (const_desc_htab, add_string_csts, &aascd);
2538 ctor = build_constructor (type, v);
2539 TREE_CONSTANT (ctor) = 1;
2540 TREE_STATIC (ctor) = 1;
2541 DECL_INITIAL (var) = ctor;
2542 varpool_assemble_decl (varpool_node_for_decl (var));
2543
2544 fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2545 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
2546 append_to_statement_list (build_call_expr (fn, 2,
2547 build_fold_addr_expr (var),
2548 gcount_tree),
2549 &asan_ctor_statements);
2550
2551 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2552 append_to_statement_list (build_call_expr (fn, 2,
2553 build_fold_addr_expr (var),
2554 gcount_tree),
2555 &dtor_statements);
2556 cgraph_build_static_cdtor ('D', dtor_statements,
2557 MAX_RESERVED_INIT_PRIORITY - 1);
2558 }
2559 cgraph_build_static_cdtor ('I', asan_ctor_statements,
2560 MAX_RESERVED_INIT_PRIORITY - 1);
2561 flag_sanitize |= SANITIZE_ADDRESS;
2562 }
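
/* Editorial sketch of the module-level code emitted above, expressed as C
   for illustration (it is really built as trees and static cdtors, and the
   register/unregister calls are only emitted when at least one global is
   protected):

       static struct __asan_global .LASAN0[gcount] = { ... };

       // constructor, priority MAX_RESERVED_INIT_PRIORITY - 1
       __asan_init_vN ();
       __asan_register_globals (&.LASAN0[0], gcount);

       // matching destructor, same priority
       __asan_unregister_globals (&.LASAN0[0], gcount);  */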
2563
2564 /* Instrument the current function. */
2565
2566 static unsigned int
2567 asan_instrument (void)
2568 {
2569 if (shadow_ptr_types[0] == NULL_TREE)
2570 asan_init_shadow_ptr_types ();
2571 transform_statements ();
2572 return 0;
2573 }
2574
2575 static bool
2576 gate_asan (void)
2577 {
2578 return (flag_sanitize & SANITIZE_ADDRESS) != 0
2579 && !lookup_attribute ("no_sanitize_address",
2580 DECL_ATTRIBUTES (current_function_decl));
2581 }
2582
2583 namespace {
2584
2585 const pass_data pass_data_asan =
2586 {
2587 GIMPLE_PASS, /* type */
2588 "asan", /* name */
2589 OPTGROUP_NONE, /* optinfo_flags */
2590 true, /* has_execute */
2591 TV_NONE, /* tv_id */
2592 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2593 0, /* properties_provided */
2594 0, /* properties_destroyed */
2595 0, /* todo_flags_start */
2596 TODO_update_ssa, /* todo_flags_finish */
2597 };
2598
2599 class pass_asan : public gimple_opt_pass
2600 {
2601 public:
2602 pass_asan (gcc::context *ctxt)
2603 : gimple_opt_pass (pass_data_asan, ctxt)
2604 {}
2605
2606 /* opt_pass methods: */
2607 opt_pass * clone () { return new pass_asan (m_ctxt); }
2608 virtual bool gate (function *) { return gate_asan (); }
2609 virtual unsigned int execute (function *) { return asan_instrument (); }
2610
2611 }; // class pass_asan
2612
2613 } // anon namespace
2614
2615 gimple_opt_pass *
2616 make_pass_asan (gcc::context *ctxt)
2617 {
2618 return new pass_asan (ctxt);
2619 }
2620
2621 namespace {
2622
2623 const pass_data pass_data_asan_O0 =
2624 {
2625 GIMPLE_PASS, /* type */
2626 "asan0", /* name */
2627 OPTGROUP_NONE, /* optinfo_flags */
2628 true, /* has_execute */
2629 TV_NONE, /* tv_id */
2630 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2631 0, /* properties_provided */
2632 0, /* properties_destroyed */
2633 0, /* todo_flags_start */
2634 TODO_update_ssa, /* todo_flags_finish */
2635 };
2636
2637 class pass_asan_O0 : public gimple_opt_pass
2638 {
2639 public:
2640 pass_asan_O0 (gcc::context *ctxt)
2641 : gimple_opt_pass (pass_data_asan_O0, ctxt)
2642 {}
2643
2644 /* opt_pass methods: */
2645 virtual bool gate (function *) { return !optimize && gate_asan (); }
2646 virtual unsigned int execute (function *) { return asan_instrument (); }
2647
2648 }; // class pass_asan_O0
2649
2650 } // anon namespace
2651
2652 gimple_opt_pass *
2653 make_pass_asan_O0 (gcc::context *ctxt)
2654 {
2655 return new pass_asan_O0 (ctxt);
2656 }
2657
2658 /* Perform optimization of sanitize functions. */
2659
2660 namespace {
2661
2662 const pass_data pass_data_sanopt =
2663 {
2664 GIMPLE_PASS, /* type */
2665 "sanopt", /* name */
2666 OPTGROUP_NONE, /* optinfo_flags */
2667 true, /* has_execute */
2668 TV_NONE, /* tv_id */
2669 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2670 0, /* properties_provided */
2671 0, /* properties_destroyed */
2672 0, /* todo_flags_start */
2673 TODO_update_ssa, /* todo_flags_finish */
2674 };
2675
2676 class pass_sanopt : public gimple_opt_pass
2677 {
2678 public:
2679 pass_sanopt (gcc::context *ctxt)
2680 : gimple_opt_pass (pass_data_sanopt, ctxt)
2681 {}
2682
2683 /* opt_pass methods: */
2684 virtual bool gate (function *) { return flag_sanitize; }
2685 virtual unsigned int execute (function *);
2686
2687 }; // class pass_sanopt
2688
2689 unsigned int
2690 pass_sanopt::execute (function *fun)
2691 {
2692 basic_block bb;
2693
2694 FOR_EACH_BB_FN (bb, fun)
2695 {
2696 gimple_stmt_iterator gsi;
2697 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2698 {
2699 gimple stmt = gsi_stmt (gsi);
2700
2701 if (!is_gimple_call (stmt))
2702 continue;
2703
2704 if (gimple_call_internal_p (stmt))
2705 switch (gimple_call_internal_fn (stmt))
2706 {
2707 case IFN_UBSAN_NULL:
2708 ubsan_expand_null_ifn (gsi);
2709 break;
2710 default:
2711 break;
2712 }
2713
2714 if (dump_file && (dump_flags & TDF_DETAILS))
2715 {
2716 fprintf (dump_file, "Optimized\n ");
2717 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2718 fprintf (dump_file, "\n");
2719 }
2720 }
2721 }
2722 return 0;
2723 }
2724
2725 } // anon namespace
2726
2727 gimple_opt_pass *
2728 make_pass_sanopt (gcc::context *ctxt)
2729 {
2730 return new pass_sanopt (ctxt);
2731 }
2732
2733 #include "gt-asan.h"