Add pretty printer for ASAN_MARK and add a helper fn
1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2016 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "cfghooks.h"
31 #include "alloc-pool.h"
32 #include "tree-pass.h"
33 #include "memmodel.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "optabs.h"
39 #include "emit-rtl.h"
40 #include "cgraph.h"
41 #include "gimple-pretty-print.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "cfganal.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "varasm.h"
48 #include "stor-layout.h"
49 #include "tree-iterator.h"
50 #include "asan.h"
51 #include "dojump.h"
52 #include "explow.h"
53 #include "expr.h"
54 #include "output.h"
55 #include "langhooks.h"
56 #include "cfgloop.h"
57 #include "gimple-builder.h"
58 #include "ubsan.h"
59 #include "params.h"
60 #include "builtins.h"
61 #include "fnmatch.h"
62
63 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
64 with <2x slowdown on average.
65
66 The tool consists of two parts:
67 instrumentation module (this file) and a run-time library.
68 The instrumentation module adds a run-time check before every memory insn.
69 For an 8- or 16-byte load accessing address X:
70 ShadowAddr = (X >> 3) + Offset
71 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
72 if (ShadowValue)
73 __asan_report_load8(X);
74 For a load of N bytes (N=1, 2 or 4) from address X:
75 ShadowAddr = (X >> 3) + Offset
76 ShadowValue = *(char*)ShadowAddr;
77 if (ShadowValue)
78 if ((X & 7) + N - 1 > ShadowValue)
79 __asan_report_loadN(X);
80 Stores are instrumented similarly, but using __asan_report_storeN functions.
81 A call to __asan_init_vN() is inserted into the list of module CTORs.
82 N is the version number of the AddressSanitizer API. The changes between the
83 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
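     As a worked example, assuming the default x86_64 shadow offset
     0x7fff8000, a 4-byte load from X = 0x602010 checks the shadow byte
     at (0x602010 >> 3) + 0x7fff8000 = 0x800b8402.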
84
85 The run-time library redefines malloc (so that red zones are inserted around
86 the allocated memory) and free (so that reuse of freed memory is delayed),
87 and provides the __asan_report* and __asan_init_vN functions.
88
89 Read more:
90 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
91
92 The current implementation supports detection of out-of-bounds and
93 use-after-free in the heap, on the stack and for global variables.
94
95 [Protection of stack variables]
96
97 To understand how detection of out-of-bounds and use-after-free works
98 for stack variables, let's look at this example on x86_64 where the
99 stack grows downward:
100
101 int
102 foo ()
103 {
104 char a[23] = {0};
105 int b[2] = {0};
106
107 a[5] = 1;
108 b[1] = 2;
109
110 return a[5] + b[1];
111 }
112
113 For this function, the stack protected by asan will be organized as
114 follows, from the top of the stack to the bottom:
115
116 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
117
118 Slot 2/ [8 bytes of red zone that pad the space of 'a' to make
119 the next slot 32-byte aligned; this one is called a Partial
120 Redzone; this 32-byte alignment is an asan constraint]
121
122 Slot 3/ [24 bytes for variable 'a']
123
124 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
125
126 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
127
128 Slot 6/ [8 bytes for variable 'b']
129
130 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
131 'LEFT RedZone']
132
133 The 32 bytes of LEFT red zone at the bottom of the stack can be
134 decomposed as follows:
135
136 1/ The first 8 bytes contain a magical asan number that is always
137 0x41B58AB3.
138
139 2/ The following 8 bytes contain a pointer to a string (to be
140 parsed at run time by the asan run-time library) whose format is
141 the following:
142
143 "<function-name> <space> <num-of-variables-on-the-stack>
144 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
145 <length-of-var-in-bytes> ){n} "
146
147 where '(...){n}' means the content inside the parenthesis occurs 'n'
148 times, with 'n' being the number of variables on the stack.
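     For the foo () example above, such a string might look like
     "foo 2 32 8 96 24 ", i.e. two variables, 'b' at 32-byte-aligned
     offset 32 with length 8 and 'a' at offset 96 with length 24 (the
     exact offsets are target-dependent).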
149
150 3/ The following 8 bytes contain the PC of the current function which
151 will be used by the run-time library to print an error message.
152
153 4/ The following 8 bytes are reserved for internal use by the run-time.
154
155 The shadow memory for that stack layout is going to look like this:
156
157 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
158 The F1 byte pattern is a magic number called
159 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
160 the memory for that shadow byte is part of the LEFT red zone
161 intended to sit at the bottom of the variables on the stack.
162
163 - content of shadow memory 8 bytes for slots 6 and 5:
164 0xF4F4F400. The F4 byte pattern is a magic number
165 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
166 memory region for this shadow byte is a PARTIAL red zone
167 intended to pad a variable A, so that the slot following
168 {A,padding} is 32-byte aligned.
169
170 Note that the fact that the least significant byte of this
171 shadow memory content is 00 means that 8 bytes of its
172 corresponding memory (which corresponds to the memory of
173 variable 'b') are addressable.
174
175 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
176 The F2 byte pattern is a magic number called
177 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
178 region for this shadow byte is a MIDDLE red zone intended to
179 sit between two 32-byte aligned slots of {variable,padding}.
180
181 - content of shadow memory 8 bytes for slots 3 and 2:
182 0xF4000000. This represents the concatenation of
183 variable 'a' and the partial red zone following it, like what we
184 had for variable 'b'. The least significant 3 bytes being 00
185 means that the 24 bytes of variable 'a' are addressable.
186
187 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
188 The F3 byte pattern is a magic number called
189 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
190 region for this shadow byte is a RIGHT red zone intended to sit
191 at the top of the variables of the stack.
192
193 Note that the real variable layout is done in expand_used_vars in
194 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
195 stack variables as well as the different red zones, emits some
196 prologue code to populate the shadow memory so as to poison (mark as
197 non-accessible) the regions of the red zones and mark the regions of
198 stack variables as accessible, and emits some epilogue code to
199 un-poison (mark as accessible) the regions of red zones right before
200 the function exits.
201
202 [Protection of global variables]
203
204 The basic idea is to insert a red zone between two global variables
205 and install a constructor function that calls the asan runtime to do
206 the populating of the relevant shadow memory regions at load time.
207
208 So the global variables are laid out so as to insert a red zone between
209 them. The red zones are sized so that each variable starts on a
210 32-byte boundary.
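     For instance, a 20-byte global would get a 44-byte red zone
     appended to it, so that the next global starts 64 bytes later, on
     a 32-byte boundary.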
211
212 Then a constructor function is installed so that, for each global
213 variable, it calls the runtime asan library function
214 __asan_register_globals with an instance of this type:
215
216 struct __asan_global
217 {
218 // Address of the beginning of the global variable.
219 const void *__beg;
220
221 // Initial size of the global variable.
222 uptr __size;
223
224 // Size of the global variable + size of the red zone. This
225 // size is 32 bytes aligned.
226 uptr __size_with_redzone;
227
228 // Name of the global variable.
229 const void *__name;
230
231 // Name of the module where the global variable is declared.
232 const void *__module_name;
233
234 // 1 if it has dynamic initialization, 0 otherwise.
235 uptr __has_dynamic_init;
236
237 // A pointer to struct that contains source location, could be NULL.
238 __asan_global_source_location *__location;
239 }
240
241 A destructor function that calls the runtime asan library function
242 __asan_unregister_globals is also installed. */
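/* As an illustration, for "int g;" in module m.c the constructor
   would register roughly { &g, 4, 64, "g", "m.c", 0, NULL } for it;
   the exact name strings and red zone size are runtime and target
   details. */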
243
244 static unsigned HOST_WIDE_INT asan_shadow_offset_value;
245 static bool asan_shadow_offset_computed;
246 static vec<char *> sanitized_sections;
247
248 /* Set of variable declarations that are going to be guarded by
249 use-after-scope sanitizer. */
250
251 static hash_set<tree> *asan_handled_variables = NULL;
252
253 hash_set <tree> *asan_used_labels = NULL;
254
255 /* Sets shadow offset to value in string VAL. */
256
257 bool
258 set_asan_shadow_offset (const char *val)
259 {
260 char *endp;
261
262 errno = 0;
263 #ifdef HAVE_LONG_LONG
264 asan_shadow_offset_value = strtoull (val, &endp, 0);
265 #else
266 asan_shadow_offset_value = strtoul (val, &endp, 0);
267 #endif
268 if (!(*val != '\0' && *endp == '\0' && errno == 0))
269 return false;
270
271 asan_shadow_offset_computed = true;
272
273 return true;
274 }
275
276 /* Set list of user-defined sections that need to be sanitized. */
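/* For instance, a SECTIONS string of ".mysec*,.other" (as passed via
   the -fsanitize-sections= option) yields the patterns ".mysec*" and
   ".other", which section_sanitized_p below matches with fnmatch. */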
277
278 void
279 set_sanitized_sections (const char *sections)
280 {
281 char *pat;
282 unsigned i;
283 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
284 free (pat);
285 sanitized_sections.truncate (0);
286
287 for (const char *s = sections; *s; )
288 {
289 const char *end;
290 for (end = s; *end && *end != ','; ++end);
291 size_t len = end - s;
292 sanitized_sections.safe_push (xstrndup (s, len));
293 s = *end ? end + 1 : end;
294 }
295 }
296
/* Return true when STMT is a call to the internal function
   IFN_ASAN_MARK whose first argument is equal to FLAG.  */

297 bool
298 asan_mark_p (gimple *stmt, enum asan_mark_flags flag)
299 {
300 return (gimple_call_internal_p (stmt, IFN_ASAN_MARK)
301 && tree_to_uhwi (gimple_call_arg (stmt, 0)) == flag);
302 }
303
/* Return true if the stack of the current function should be
   instrumented by Address Sanitizer.  */

304 bool
305 asan_sanitize_stack_p (void)
306 {
307 return ((flag_sanitize & SANITIZE_ADDRESS)
308 && ASAN_STACK
309 && !asan_no_sanitize_address_p ());
310 }
311
312 /* Checks whether section SEC should be sanitized. */
313
314 static bool
315 section_sanitized_p (const char *sec)
316 {
317 char *pat;
318 unsigned i;
319 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
320 if (fnmatch (pat, sec, FNM_PERIOD) == 0)
321 return true;
322 return false;
323 }
324
325 /* Returns Asan shadow offset. */
326
327 static unsigned HOST_WIDE_INT
328 asan_shadow_offset ()
329 {
330 if (!asan_shadow_offset_computed)
331 {
332 asan_shadow_offset_computed = true;
333 asan_shadow_offset_value = targetm.asan_shadow_offset ();
334 }
335 return asan_shadow_offset_value;
336 }
337
338 alias_set_type asan_shadow_set = -1;
339
340 /* Pointer types to 1, 2 or 4 byte integers in shadow memory. A separate
341 alias set is used for all shadow memory accesses. */
342 static GTY(()) tree shadow_ptr_types[3];
343
344 /* Decl for __asan_option_detect_stack_use_after_return. */
345 static GTY(()) tree asan_detect_stack_use_after_return;
346
347 /* Hashtable support for memory references used by gimple
348 statements. */
349
350 /* This type represents a reference to a memory region. */
351 struct asan_mem_ref
352 {
353 /* The expression of the beginning of the memory region. */
354 tree start;
355
356 /* The size of the access. */
357 HOST_WIDE_INT access_size;
358 };
359
360 object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");
361
362 /* Initializes an instance of asan_mem_ref. */
363
364 static void
365 asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
366 {
367 ref->start = start;
368 ref->access_size = access_size;
369 }
370
371 /* Allocates memory for an instance of asan_mem_ref from the
372 asan_mem_ref_pool object allocator and initializes it.
373 START is the address of (or the expression pointing to) the
374 beginning of memory reference. ACCESS_SIZE is the size of the
375 access to the referenced memory. */
376
377 static asan_mem_ref*
378 asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
379 {
380 asan_mem_ref *ref = asan_mem_ref_pool.allocate ();
381
382 asan_mem_ref_init (ref, start, access_size);
383 return ref;
384 }
385
386 /* This builds and returns a pointer to the end of the memory region
387 that starts at START and has length LEN. */
388
389 tree
390 asan_mem_ref_get_end (tree start, tree len)
391 {
392 if (len == NULL_TREE || integer_zerop (len))
393 return start;
394
395 if (!ptrofftype_p (len))
396 len = convert_to_ptrofftype (len);
397
398 return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
399 }
400
401 /* Return a tree expression that represents the end of the referenced
402 memory region. Beware that this function can actually build a new
403 tree expression. */
404
405 tree
406 asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
407 {
408 return asan_mem_ref_get_end (ref->start, len);
409 }
410
411 struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
412 {
413 static inline hashval_t hash (const asan_mem_ref *);
414 static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
415 };
416
417 /* Hash a memory reference. */
418
419 inline hashval_t
420 asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
421 {
422 return iterative_hash_expr (mem_ref->start, 0);
423 }
424
425 /* Compare two memory references by their start expressions; the
426 access sizes are not compared. */
427
428 inline bool
429 asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
430 const asan_mem_ref *m2)
431 {
432 return operand_equal_p (m1->start, m2->start, 0);
433 }
434
435 static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
436
437 /* Returns a reference to the hash table containing memory references.
438 This function ensures that the hash table is created. Note that
439 this hash table is updated by the function
440 update_mem_ref_hash_table. */
441
442 static hash_table<asan_mem_ref_hasher> *
443 get_mem_ref_hash_table ()
444 {
445 if (!asan_mem_ref_ht)
446 asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
447
448 return asan_mem_ref_ht;
449 }
450
451 /* Clear all entries from the memory references hash table. */
452
453 static void
454 empty_mem_ref_hash_table ()
455 {
456 if (asan_mem_ref_ht)
457 asan_mem_ref_ht->empty ();
458 }
459
460 /* Free the memory references hash table. */
461
462 static void
463 free_mem_ref_resources ()
464 {
465 delete asan_mem_ref_ht;
466 asan_mem_ref_ht = NULL;
467
468 asan_mem_ref_pool.release ();
469 }
470
471 /* Return true iff the memory reference REF has been instrumented. */
472
473 static bool
474 has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
475 {
476 asan_mem_ref r;
477 asan_mem_ref_init (&r, ref, access_size);
478
479 asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
480 return saved_ref && saved_ref->access_size >= access_size;
481 }
482
483 /* Return true iff the memory reference REF has been instrumented. */
484
485 static bool
486 has_mem_ref_been_instrumented (const asan_mem_ref *ref)
487 {
488 return has_mem_ref_been_instrumented (ref->start, ref->access_size);
489 }
490
491 /* Return true iff access to memory region starting at REF and of
492 length LEN has been instrumented. */
493
494 static bool
495 has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
496 {
497 HOST_WIDE_INT size_in_bytes
498 = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
499
500 return size_in_bytes != -1
501 && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
502 }
503
504 /* Set REF to the memory reference present in a gimple assignment
505 ASSIGNMENT. Return true upon successful completion, false
506 otherwise. */
507
508 static bool
509 get_mem_ref_of_assignment (const gassign *assignment,
510 asan_mem_ref *ref,
511 bool *ref_is_store)
512 {
513 gcc_assert (gimple_assign_single_p (assignment));
514
515 if (gimple_store_p (assignment)
516 && !gimple_clobber_p (assignment))
517 {
518 ref->start = gimple_assign_lhs (assignment);
519 *ref_is_store = true;
520 }
521 else if (gimple_assign_load_p (assignment))
522 {
523 ref->start = gimple_assign_rhs1 (assignment);
524 *ref_is_store = false;
525 }
526 else
527 return false;
528
529 ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
530 return true;
531 }
532
533 /* Extract the memory references contained in a gimple statement
534 representing a memory-access builtin call; return true if any. */
535
536 static bool
537 get_mem_refs_of_builtin_call (const gcall *call,
538 asan_mem_ref *src0,
539 tree *src0_len,
540 bool *src0_is_store,
541 asan_mem_ref *src1,
542 tree *src1_len,
543 bool *src1_is_store,
544 asan_mem_ref *dst,
545 tree *dst_len,
546 bool *dst_is_store,
547 bool *dest_is_deref,
548 bool *intercepted_p)
549 {
550 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
551
552 tree callee = gimple_call_fndecl (call);
553 tree source0 = NULL_TREE, source1 = NULL_TREE,
554 dest = NULL_TREE, len = NULL_TREE;
555 bool is_store = true, got_reference_p = false;
556 HOST_WIDE_INT access_size = 1;
557
558 *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));
559
560 switch (DECL_FUNCTION_CODE (callee))
561 {
562 /* (s, s, n) style memops. */
563 case BUILT_IN_BCMP:
564 case BUILT_IN_MEMCMP:
565 source0 = gimple_call_arg (call, 0);
566 source1 = gimple_call_arg (call, 1);
567 len = gimple_call_arg (call, 2);
568 break;
569
570 /* (src, dest, n) style memops. */
571 case BUILT_IN_BCOPY:
572 source0 = gimple_call_arg (call, 0);
573 dest = gimple_call_arg (call, 1);
574 len = gimple_call_arg (call, 2);
575 break;
576
577 /* (dest, src, n) style memops. */
578 case BUILT_IN_MEMCPY:
579 case BUILT_IN_MEMCPY_CHK:
580 case BUILT_IN_MEMMOVE:
581 case BUILT_IN_MEMMOVE_CHK:
582 case BUILT_IN_MEMPCPY:
583 case BUILT_IN_MEMPCPY_CHK:
584 dest = gimple_call_arg (call, 0);
585 source0 = gimple_call_arg (call, 1);
586 len = gimple_call_arg (call, 2);
587 break;
588
589 /* (dest, n) style memops. */
590 case BUILT_IN_BZERO:
591 dest = gimple_call_arg (call, 0);
592 len = gimple_call_arg (call, 1);
593 break;
594
595 /* (dest, x, n) style memops. */
596 case BUILT_IN_MEMSET:
597 case BUILT_IN_MEMSET_CHK:
598 dest = gimple_call_arg (call, 0);
599 len = gimple_call_arg (call, 2);
600 break;
601
602 case BUILT_IN_STRLEN:
603 source0 = gimple_call_arg (call, 0);
604 len = gimple_call_lhs (call);
605 break;
606
607 /* And now the __atomic* and __sync builtins.
608 These are handled differently from the classical memory
609 access builtins above. */
610
611 case BUILT_IN_ATOMIC_LOAD_1:
612 case BUILT_IN_ATOMIC_LOAD_2:
613 case BUILT_IN_ATOMIC_LOAD_4:
614 case BUILT_IN_ATOMIC_LOAD_8:
615 case BUILT_IN_ATOMIC_LOAD_16:
616 is_store = false;
617 /* fall through. */
618
619 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
620 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
621 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
622 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
623 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
624
625 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
626 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
627 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
628 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
629 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
630
631 case BUILT_IN_SYNC_FETCH_AND_OR_1:
632 case BUILT_IN_SYNC_FETCH_AND_OR_2:
633 case BUILT_IN_SYNC_FETCH_AND_OR_4:
634 case BUILT_IN_SYNC_FETCH_AND_OR_8:
635 case BUILT_IN_SYNC_FETCH_AND_OR_16:
636
637 case BUILT_IN_SYNC_FETCH_AND_AND_1:
638 case BUILT_IN_SYNC_FETCH_AND_AND_2:
639 case BUILT_IN_SYNC_FETCH_AND_AND_4:
640 case BUILT_IN_SYNC_FETCH_AND_AND_8:
641 case BUILT_IN_SYNC_FETCH_AND_AND_16:
642
643 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
644 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
645 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
646 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
647 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
648
649 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
650 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
651 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
652 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
653
654 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
655 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
656 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
657 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
658 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
659
660 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
661 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
662 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
663 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
664 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
665
666 case BUILT_IN_SYNC_OR_AND_FETCH_1:
667 case BUILT_IN_SYNC_OR_AND_FETCH_2:
668 case BUILT_IN_SYNC_OR_AND_FETCH_4:
669 case BUILT_IN_SYNC_OR_AND_FETCH_8:
670 case BUILT_IN_SYNC_OR_AND_FETCH_16:
671
672 case BUILT_IN_SYNC_AND_AND_FETCH_1:
673 case BUILT_IN_SYNC_AND_AND_FETCH_2:
674 case BUILT_IN_SYNC_AND_AND_FETCH_4:
675 case BUILT_IN_SYNC_AND_AND_FETCH_8:
676 case BUILT_IN_SYNC_AND_AND_FETCH_16:
677
678 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
679 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
680 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
681 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
682 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
683
684 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
685 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
686 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
687 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
688
689 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
690 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
691 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
692 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
693 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
694
695 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
696 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
697 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
698 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
699 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
700
701 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
702 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
703 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
704 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
705 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
706
707 case BUILT_IN_SYNC_LOCK_RELEASE_1:
708 case BUILT_IN_SYNC_LOCK_RELEASE_2:
709 case BUILT_IN_SYNC_LOCK_RELEASE_4:
710 case BUILT_IN_SYNC_LOCK_RELEASE_8:
711 case BUILT_IN_SYNC_LOCK_RELEASE_16:
712
713 case BUILT_IN_ATOMIC_EXCHANGE_1:
714 case BUILT_IN_ATOMIC_EXCHANGE_2:
715 case BUILT_IN_ATOMIC_EXCHANGE_4:
716 case BUILT_IN_ATOMIC_EXCHANGE_8:
717 case BUILT_IN_ATOMIC_EXCHANGE_16:
718
719 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
720 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
721 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
722 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
723 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
724
725 case BUILT_IN_ATOMIC_STORE_1:
726 case BUILT_IN_ATOMIC_STORE_2:
727 case BUILT_IN_ATOMIC_STORE_4:
728 case BUILT_IN_ATOMIC_STORE_8:
729 case BUILT_IN_ATOMIC_STORE_16:
730
731 case BUILT_IN_ATOMIC_ADD_FETCH_1:
732 case BUILT_IN_ATOMIC_ADD_FETCH_2:
733 case BUILT_IN_ATOMIC_ADD_FETCH_4:
734 case BUILT_IN_ATOMIC_ADD_FETCH_8:
735 case BUILT_IN_ATOMIC_ADD_FETCH_16:
736
737 case BUILT_IN_ATOMIC_SUB_FETCH_1:
738 case BUILT_IN_ATOMIC_SUB_FETCH_2:
739 case BUILT_IN_ATOMIC_SUB_FETCH_4:
740 case BUILT_IN_ATOMIC_SUB_FETCH_8:
741 case BUILT_IN_ATOMIC_SUB_FETCH_16:
742
743 case BUILT_IN_ATOMIC_AND_FETCH_1:
744 case BUILT_IN_ATOMIC_AND_FETCH_2:
745 case BUILT_IN_ATOMIC_AND_FETCH_4:
746 case BUILT_IN_ATOMIC_AND_FETCH_8:
747 case BUILT_IN_ATOMIC_AND_FETCH_16:
748
749 case BUILT_IN_ATOMIC_NAND_FETCH_1:
750 case BUILT_IN_ATOMIC_NAND_FETCH_2:
751 case BUILT_IN_ATOMIC_NAND_FETCH_4:
752 case BUILT_IN_ATOMIC_NAND_FETCH_8:
753 case BUILT_IN_ATOMIC_NAND_FETCH_16:
754
755 case BUILT_IN_ATOMIC_XOR_FETCH_1:
756 case BUILT_IN_ATOMIC_XOR_FETCH_2:
757 case BUILT_IN_ATOMIC_XOR_FETCH_4:
758 case BUILT_IN_ATOMIC_XOR_FETCH_8:
759 case BUILT_IN_ATOMIC_XOR_FETCH_16:
760
761 case BUILT_IN_ATOMIC_OR_FETCH_1:
762 case BUILT_IN_ATOMIC_OR_FETCH_2:
763 case BUILT_IN_ATOMIC_OR_FETCH_4:
764 case BUILT_IN_ATOMIC_OR_FETCH_8:
765 case BUILT_IN_ATOMIC_OR_FETCH_16:
766
767 case BUILT_IN_ATOMIC_FETCH_ADD_1:
768 case BUILT_IN_ATOMIC_FETCH_ADD_2:
769 case BUILT_IN_ATOMIC_FETCH_ADD_4:
770 case BUILT_IN_ATOMIC_FETCH_ADD_8:
771 case BUILT_IN_ATOMIC_FETCH_ADD_16:
772
773 case BUILT_IN_ATOMIC_FETCH_SUB_1:
774 case BUILT_IN_ATOMIC_FETCH_SUB_2:
775 case BUILT_IN_ATOMIC_FETCH_SUB_4:
776 case BUILT_IN_ATOMIC_FETCH_SUB_8:
777 case BUILT_IN_ATOMIC_FETCH_SUB_16:
778
779 case BUILT_IN_ATOMIC_FETCH_AND_1:
780 case BUILT_IN_ATOMIC_FETCH_AND_2:
781 case BUILT_IN_ATOMIC_FETCH_AND_4:
782 case BUILT_IN_ATOMIC_FETCH_AND_8:
783 case BUILT_IN_ATOMIC_FETCH_AND_16:
784
785 case BUILT_IN_ATOMIC_FETCH_NAND_1:
786 case BUILT_IN_ATOMIC_FETCH_NAND_2:
787 case BUILT_IN_ATOMIC_FETCH_NAND_4:
788 case BUILT_IN_ATOMIC_FETCH_NAND_8:
789 case BUILT_IN_ATOMIC_FETCH_NAND_16:
790
791 case BUILT_IN_ATOMIC_FETCH_XOR_1:
792 case BUILT_IN_ATOMIC_FETCH_XOR_2:
793 case BUILT_IN_ATOMIC_FETCH_XOR_4:
794 case BUILT_IN_ATOMIC_FETCH_XOR_8:
795 case BUILT_IN_ATOMIC_FETCH_XOR_16:
796
797 case BUILT_IN_ATOMIC_FETCH_OR_1:
798 case BUILT_IN_ATOMIC_FETCH_OR_2:
799 case BUILT_IN_ATOMIC_FETCH_OR_4:
800 case BUILT_IN_ATOMIC_FETCH_OR_8:
801 case BUILT_IN_ATOMIC_FETCH_OR_16:
802 {
803 dest = gimple_call_arg (call, 0);
804 /* DEST represents the address of a memory location.
805 instrument_derefs wants the memory location, so let's
806 dereference the address DEST before handing it to
807 instrument_derefs. */
808 if (TREE_CODE (dest) == ADDR_EXPR)
809 dest = TREE_OPERAND (dest, 0);
810 else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
811 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
812 dest, build_int_cst (TREE_TYPE (dest), 0));
813 else
814 gcc_unreachable ();
815
816 access_size = int_size_in_bytes (TREE_TYPE (dest));
817 }
818
819 default:
820 /* The other builtins' memory accesses are not instrumented in this
821 function because they either don't have any length parameter,
822 or their length parameter is just a limit. */
823 break;
824 }
825
826 if (len != NULL_TREE)
827 {
828 if (source0 != NULL_TREE)
829 {
830 src0->start = source0;
831 src0->access_size = access_size;
832 *src0_len = len;
833 *src0_is_store = false;
834 }
835
836 if (source1 != NULL_TREE)
837 {
838 src1->start = source1;
839 src1->access_size = access_size;
840 *src1_len = len;
841 *src1_is_store = false;
842 }
843
844 if (dest != NULL_TREE)
845 {
846 dst->start = dest;
847 dst->access_size = access_size;
848 *dst_len = len;
849 *dst_is_store = true;
850 }
851
852 got_reference_p = true;
853 }
854 else if (dest)
855 {
856 dst->start = dest;
857 dst->access_size = access_size;
858 *dst_len = NULL_TREE;
859 *dst_is_store = is_store;
860 *dest_is_deref = true;
861 got_reference_p = true;
862 }
863
864 return got_reference_p;
865 }
866
867 /* Return true iff a given gimple statement has been instrumented.
868 Note that the statement is "defined" by the memory references it
869 contains. */
870
871 static bool
872 has_stmt_been_instrumented_p (gimple *stmt)
873 {
874 if (gimple_assign_single_p (stmt))
875 {
876 bool r_is_store;
877 asan_mem_ref r;
878 asan_mem_ref_init (&r, NULL, 1);
879
880 if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
881 &r_is_store))
882 return has_mem_ref_been_instrumented (&r);
883 }
884 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
885 {
886 asan_mem_ref src0, src1, dest;
887 asan_mem_ref_init (&src0, NULL, 1);
888 asan_mem_ref_init (&src1, NULL, 1);
889 asan_mem_ref_init (&dest, NULL, 1);
890
891 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
892 bool src0_is_store = false, src1_is_store = false,
893 dest_is_store = false, dest_is_deref = false, intercepted_p = true;
894 if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
895 &src0, &src0_len, &src0_is_store,
896 &src1, &src1_len, &src1_is_store,
897 &dest, &dest_len, &dest_is_store,
898 &dest_is_deref, &intercepted_p))
899 {
900 if (src0.start != NULL_TREE
901 && !has_mem_ref_been_instrumented (&src0, src0_len))
902 return false;
903
904 if (src1.start != NULL_TREE
905 && !has_mem_ref_been_instrumented (&src1, src1_len))
906 return false;
907
908 if (dest.start != NULL_TREE
909 && !has_mem_ref_been_instrumented (&dest, dest_len))
910 return false;
911
912 return true;
913 }
914 }
915 else if (is_gimple_call (stmt) && gimple_store_p (stmt))
916 {
917 asan_mem_ref r;
918 asan_mem_ref_init (&r, NULL, 1);
919
920 r.start = gimple_call_lhs (stmt);
921 r.access_size = int_size_in_bytes (TREE_TYPE (r.start));
922 return has_mem_ref_been_instrumented (&r);
923 }
924
925 return false;
926 }
927
928 /* Insert a memory reference into the hash table. */
929
930 static void
931 update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
932 {
933 hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
934
935 asan_mem_ref r;
936 asan_mem_ref_init (&r, ref, access_size);
937
938 asan_mem_ref **slot = ht->find_slot (&r, INSERT);
939 if (*slot == NULL || (*slot)->access_size < access_size)
940 *slot = asan_mem_ref_new (ref, access_size);
941 }
942
943 /* Initialize shadow_ptr_types array. */
944
945 static void
946 asan_init_shadow_ptr_types (void)
947 {
948 asan_shadow_set = new_alias_set ();
949 tree types[3] = { signed_char_type_node, short_integer_type_node,
950 integer_type_node };
951
952 for (unsigned i = 0; i < 3; i++)
953 {
954 shadow_ptr_types[i] = build_distinct_type_copy (types[i]);
955 TYPE_ALIAS_SET (shadow_ptr_types[i]) = asan_shadow_set;
956 shadow_ptr_types[i] = build_pointer_type (shadow_ptr_types[i]);
957 }
958
959 initialize_sanitizer_builtins ();
960 }
961
962 /* Create an ADDR_EXPR of a STRING_CST holding the text of the pretty printer PP. */
963
964 static tree
965 asan_pp_string (pretty_printer *pp)
966 {
967 const char *buf = pp_formatted_text (pp);
968 size_t len = strlen (buf);
969 tree ret = build_string (len + 1, buf);
970 TREE_TYPE (ret)
971 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
972 build_index_type (size_int (len)));
973 TREE_READONLY (ret) = 1;
974 TREE_STATIC (ret) = 1;
975 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
976 }
977
978 /* Return a CONST_INT representing 4 consecutive shadow memory bytes. */
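/* For example, on a little-endian target the shadow bytes
   { 0x00, 0x00, 0x00, 0xF2 } become the SImode constant 0xF2000000. */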
979
980 static rtx
981 asan_shadow_cst (unsigned char shadow_bytes[4])
982 {
983 int i;
984 unsigned HOST_WIDE_INT val = 0;
985 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
986 for (i = 0; i < 4; i++)
987 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
988 << (BITS_PER_UNIT * i);
989 return gen_int_mode (val, SImode);
990 }
991
992 /* Clear shadow memory at SHADOW_MEM, LEN bytes. We can't emit a
993 library call here, though. */
994
995 static void
996 asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
997 {
998 rtx_insn *insn, *insns, *jump;
999 rtx_code_label *top_label;
1000 rtx end, addr, tmp;
1001
1002 start_sequence ();
1003 clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
1004 insns = get_insns ();
1005 end_sequence ();
1006 for (insn = insns; insn; insn = NEXT_INSN (insn))
1007 if (CALL_P (insn))
1008 break;
1009 if (insn == NULL_RTX)
1010 {
1011 emit_insn (insns);
1012 return;
1013 }
1014
1015 gcc_assert ((len & 3) == 0);
1016 top_label = gen_label_rtx ();
1017 addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
1018 shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
1019 end = force_reg (Pmode, plus_constant (Pmode, addr, len));
1020 emit_label (top_label);
1021
1022 emit_move_insn (shadow_mem, const0_rtx);
1023 tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
1024 true, OPTAB_LIB_WIDEN);
1025 if (tmp != addr)
1026 emit_move_insn (addr, tmp);
1027 emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
1028 jump = get_last_insn ();
1029 gcc_assert (JUMP_P (jump));
1030 add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
1031 }
1032
/* Emit the 'LASANPC' label at the start of the current function's
   body; its address is stored in the asan stack frame description
   and is used by the run-time library to report the function's PC.  */

1033 void
1034 asan_function_start (void)
1035 {
1036 section *fnsec = function_section (current_function_decl);
1037 switch_to_section (fnsec);
1038 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
1039 current_function_funcdef_no);
1040 }
1041
1042 /* Return number of shadow bytes that are occupied by a local variable
1043 of SIZE bytes. */
1044
1045 static unsigned HOST_WIDE_INT
1046 shadow_mem_size (unsigned HOST_WIDE_INT size)
1047 {
1048 return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
1049 }
1050
1051 /* Insert code to protect stack vars. The prologue sequence should be emitted
1052 directly, the epilogue sequence returned. BASE is the register holding the
1053 stack base, relative to which the OFFSETS array offsets are computed; the OFFSETS
1054 array contains pairs of offsets in reverse order, always the end offset
1055 of some gap that needs protection followed by its starting offset,
1056 and DECLS is an array of representative decls for each var partition.
1057 LENGTH is the length of the OFFSETS array, the DECLS array is LENGTH / 2 - 1
1058 elements long (OFFSETS includes the gap before the first variable as well
1059 as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
1060 register which stack vars' DECL_RTLs are based on. Either BASE should be
1061 assigned to PBASE, when not doing use-after-return protection, or the
1062 corresponding address based on the __asan_stack_malloc* return value. */
1063
1064 rtx_insn *
1065 asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
1066 HOST_WIDE_INT *offsets, tree *decls, int length)
1067 {
1068 rtx shadow_base, shadow_mem, ret, mem, orig_base;
1069 rtx_code_label *lab;
1070 rtx_insn *insns;
1071 char buf[30];
1072 unsigned char shadow_bytes[4];
1073 HOST_WIDE_INT base_offset = offsets[length - 1];
1074 HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
1075 HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
1076 HOST_WIDE_INT last_offset;
1077 int l;
1078 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
1079 tree str_cst, decl, id;
1080 int use_after_return_class = -1;
1081
1082 if (shadow_ptr_types[0] == NULL_TREE)
1083 asan_init_shadow_ptr_types ();
1084
1085 /* First of all, prepare the description string. */
1086 pretty_printer asan_pp;
1087
1088 pp_decimal_int (&asan_pp, length / 2 - 1);
1089 pp_space (&asan_pp);
1090 for (l = length - 2; l; l -= 2)
1091 {
1092 tree decl = decls[l / 2 - 1];
1093 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
1094 pp_space (&asan_pp);
1095 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
1096 pp_space (&asan_pp);
1097 if (DECL_P (decl) && DECL_NAME (decl))
1098 {
1099 pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
1100 pp_space (&asan_pp);
1101 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
1102 }
1103 else
1104 pp_string (&asan_pp, "9 <unknown>");
1105 pp_space (&asan_pp);
1106 }
1107 str_cst = asan_pp_string (&asan_pp);
1108
1109 /* Emit the prologue sequence. */
1110 if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
1111 && ASAN_USE_AFTER_RETURN)
1112 {
1113 use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
1114 /* __asan_stack_malloc_N guarantees alignment
1115 N < 6 ? (64 << N) : 4096 bytes. */
1116 if (alignb > (use_after_return_class < 6
1117 ? (64U << use_after_return_class) : 4096U))
1118 use_after_return_class = -1;
1119 else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
1120 base_align_bias = ((asan_frame_size + alignb - 1)
1121 & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
1122 }
1123 /* Align base if target is STRICT_ALIGNMENT. */
1124 if (STRICT_ALIGNMENT)
1125 base = expand_binop (Pmode, and_optab, base,
1126 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
1127 << ASAN_SHADOW_SHIFT)
1128 / BITS_PER_UNIT), Pmode), NULL_RTX,
1129 1, OPTAB_DIRECT);
1130
1131 if (use_after_return_class == -1 && pbase)
1132 emit_move_insn (pbase, base);
1133
1134 base = expand_binop (Pmode, add_optab, base,
1135 gen_int_mode (base_offset - base_align_bias, Pmode),
1136 NULL_RTX, 1, OPTAB_DIRECT);
1137 orig_base = NULL_RTX;
1138 if (use_after_return_class != -1)
1139 {
1140 if (asan_detect_stack_use_after_return == NULL_TREE)
1141 {
1142 id = get_identifier ("__asan_option_detect_stack_use_after_return");
1143 decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1144 integer_type_node);
1145 SET_DECL_ASSEMBLER_NAME (decl, id);
1146 TREE_ADDRESSABLE (decl) = 1;
1147 DECL_ARTIFICIAL (decl) = 1;
1148 DECL_IGNORED_P (decl) = 1;
1149 DECL_EXTERNAL (decl) = 1;
1150 TREE_STATIC (decl) = 1;
1151 TREE_PUBLIC (decl) = 1;
1152 TREE_USED (decl) = 1;
1153 asan_detect_stack_use_after_return = decl;
1154 }
1155 orig_base = gen_reg_rtx (Pmode);
1156 emit_move_insn (orig_base, base);
1157 ret = expand_normal (asan_detect_stack_use_after_return);
1158 lab = gen_label_rtx ();
1159 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1160 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1161 VOIDmode, 0, lab, very_likely);
1162 snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1163 use_after_return_class);
1164 ret = init_one_libfunc (buf);
1165 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 1,
1166 GEN_INT (asan_frame_size
1167 + base_align_bias),
1168 TYPE_MODE (pointer_sized_int_node));
1169 /* __asan_stack_malloc_[n] returns a pointer to the fake stack on success
1170 and NULL otherwise. Check whether RET is NULL here and, if so, jump over
1171 the reassignment of BASE. Otherwise, reassign BASE to RET. */
1172 int very_unlikely = REG_BR_PROB_BASE / 2000 - 1;
1173 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1174 VOIDmode, 0, lab, very_unlikely);
1175 ret = convert_memory_address (Pmode, ret);
1176 emit_move_insn (base, ret);
1177 emit_label (lab);
1178 emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1179 gen_int_mode (base_align_bias
1180 - base_offset, Pmode),
1181 NULL_RTX, 1, OPTAB_DIRECT));
1182 }
1183 mem = gen_rtx_MEM (ptr_mode, base);
1184 mem = adjust_address (mem, VOIDmode, base_align_bias);
1185 emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
1186 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1187 emit_move_insn (mem, expand_normal (str_cst));
1188 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1189 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1190 id = get_identifier (buf);
1191 decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1192 VAR_DECL, id, char_type_node);
1193 SET_DECL_ASSEMBLER_NAME (decl, id);
1194 TREE_ADDRESSABLE (decl) = 1;
1195 TREE_READONLY (decl) = 1;
1196 DECL_ARTIFICIAL (decl) = 1;
1197 DECL_IGNORED_P (decl) = 1;
1198 TREE_STATIC (decl) = 1;
1199 TREE_PUBLIC (decl) = 0;
1200 TREE_USED (decl) = 1;
1201 DECL_INITIAL (decl) = decl;
1202 TREE_ASM_WRITTEN (decl) = 1;
1203 TREE_ASM_WRITTEN (id) = 1;
1204 emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1205 shadow_base = expand_binop (Pmode, lshr_optab, base,
1206 GEN_INT (ASAN_SHADOW_SHIFT),
1207 NULL_RTX, 1, OPTAB_DIRECT);
1208 shadow_base
1209 = plus_constant (Pmode, shadow_base,
1210 asan_shadow_offset ()
1211 + (base_align_bias >> ASAN_SHADOW_SHIFT));
1212 gcc_assert (asan_shadow_set != -1
1213 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1214 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1215 set_mem_alias_set (shadow_mem, asan_shadow_set);
1216 if (STRICT_ALIGNMENT)
1217 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1218 prev_offset = base_offset;
1219 for (l = length; l; l -= 2)
1220 {
1221 if (l == 2)
1222 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1223 offset = offsets[l - 1];
1224 if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
1225 {
1226 int i;
1227 HOST_WIDE_INT aoff
1228 = base_offset + ((offset - base_offset)
1229 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1230 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1231 (aoff - prev_offset)
1232 >> ASAN_SHADOW_SHIFT);
1233 prev_offset = aoff;
1234 for (i = 0; i < 4; i++, aoff += ASAN_SHADOW_GRANULARITY)
1235 if (aoff < offset)
1236 {
1237 if (aoff < offset - (HOST_WIDE_INT)ASAN_SHADOW_GRANULARITY + 1)
1238 shadow_bytes[i] = 0;
1239 else
1240 shadow_bytes[i] = offset - aoff;
1241 }
1242 else
1243 shadow_bytes[i] = ASAN_STACK_MAGIC_MIDDLE;
1244 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1245 offset = aoff;
1246 }
1247 while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
1248 {
1249 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1250 (offset - prev_offset)
1251 >> ASAN_SHADOW_SHIFT);
1252 prev_offset = offset;
1253 memset (shadow_bytes, cur_shadow_byte, 4);
1254 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1255 offset += ASAN_RED_ZONE_SIZE;
1256 }
1257 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1258 }
1259 do_pending_stack_adjust ();
1260
1261 /* Construct epilogue sequence. */
1262 start_sequence ();
1263
1264 lab = NULL;
1265 if (use_after_return_class != -1)
1266 {
1267 rtx_code_label *lab2 = gen_label_rtx ();
1268 char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
1269 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1270 emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
1271 VOIDmode, 0, lab2, very_likely);
1272 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1273 set_mem_alias_set (shadow_mem, asan_shadow_set);
1274 mem = gen_rtx_MEM (ptr_mode, base);
1275 mem = adjust_address (mem, VOIDmode, base_align_bias);
1276 emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
1277 unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
1278 if (use_after_return_class < 5
1279 && can_store_by_pieces (sz, builtin_memset_read_str, &c,
1280 BITS_PER_UNIT, true))
1281 store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
1282 BITS_PER_UNIT, true, 0);
1283 else if (use_after_return_class >= 5
1284 || !set_storage_via_setmem (shadow_mem,
1285 GEN_INT (sz),
1286 gen_int_mode (c, QImode),
1287 BITS_PER_UNIT, BITS_PER_UNIT,
1288 -1, sz, sz, sz))
1289 {
1290 snprintf (buf, sizeof buf, "__asan_stack_free_%d",
1291 use_after_return_class);
1292 ret = init_one_libfunc (buf);
1293 rtx addr = convert_memory_address (ptr_mode, base);
1294 rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
1295 emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
1296 GEN_INT (asan_frame_size + base_align_bias),
1297 TYPE_MODE (pointer_sized_int_node),
1298 orig_addr, ptr_mode);
1299 }
1300 lab = gen_label_rtx ();
1301 emit_jump (lab);
1302 emit_label (lab2);
1303 }
1304
1305 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1306 set_mem_alias_set (shadow_mem, asan_shadow_set);
1307
1308 if (STRICT_ALIGNMENT)
1309 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1310
1311 /* Unpoison the shadow memory of the stack at the very end of the function.
1312 As we're poisoning stack variables at the end of their scope,
1313 shadow memory must be properly unpoisoned here. The easiest approach
1314 is to collect all variables that should not be unpoisoned and
1315 unpoison the shadow memory of the whole stack except the ranges
1316 occupied by those variables. */
1317 last_offset = base_offset;
1318 HOST_WIDE_INT current_offset = last_offset;
1319 if (length)
1320 {
1321 HOST_WIDE_INT var_end_offset = 0;
1322 HOST_WIDE_INT stack_start = offsets[length - 1];
1323 gcc_assert (last_offset == stack_start);
1324
1325 for (int l = length - 2; l > 0; l -= 2)
1326 {
tree decl = decls[l / 2 - 1];
1327 HOST_WIDE_INT var_offset = offsets[l];
1328 current_offset = var_offset;
1329 var_end_offset = offsets[l - 1];
1330 HOST_WIDE_INT rounded_size = ROUND_UP (var_end_offset - var_offset,
1331 BITS_PER_UNIT);
1332
1333 /* Should we unpoison the variable? */
1334 if (asan_handled_variables != NULL
1335 && asan_handled_variables->contains (decl))
1336 {
1337 if (dump_file && (dump_flags & TDF_DETAILS))
1338 {
1339 const char *n = (DECL_NAME (decl)
1340 ? IDENTIFIER_POINTER (DECL_NAME (decl))
1341 : "<unknown>");
1342 fprintf (dump_file, "Unpoisoning shadow stack for variable: "
1343 "%s (%" PRId64 "B)\n", n,
1344 var_end_offset - var_offset);
1345 }
1346
1347 unsigned HOST_WIDE_INT s
1348 = shadow_mem_size (current_offset - last_offset);
1349 asan_clear_shadow (shadow_mem, s);
1350 HOST_WIDE_INT shift
1351 = shadow_mem_size (current_offset - last_offset + rounded_size);
1352 shadow_mem = adjust_address (shadow_mem, VOIDmode, shift);
1353 last_offset = var_offset + rounded_size;
1354 current_offset = last_offset;
1355 }
1356
1357 }
1358
1359 /* Handle last redzone. */
1360 current_offset = offsets[0];
1361 asan_clear_shadow (shadow_mem,
1362 shadow_mem_size (current_offset - last_offset));
1363 }
1364
1365 /* Clean up the set of instrumented stack variables. */
1366 delete asan_handled_variables;
1367 asan_handled_variables = NULL;
1368 delete asan_used_labels;
1369 asan_used_labels = NULL;
1370
1371 do_pending_stack_adjust ();
1372 if (lab)
1373 emit_label (lab);
1374
1375 insns = get_insns ();
1376 end_sequence ();
1377 return insns;
1378 }
1379
1380 /* Return true if DECL, a global var, might be overridden and therefore
1381 needs a local alias. */
1382
1383 static bool
1384 asan_needs_local_alias (tree decl)
1385 {
1386 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1387 }
1388
1389 /* Return true if DECL, a global var, is an artificial ODR indicator symbol
1390 and therefore doesn't need protection. */
1391
1392 static bool
1393 is_odr_indicator (tree decl)
1394 {
1395 return (DECL_ARTIFICIAL (decl)
1396 && lookup_attribute ("asan odr indicator", DECL_ATTRIBUTES (decl)));
1397 }
1398
1399 /* Return true if DECL is a VAR_DECL that should be protected
1400 by Address Sanitizer, by appending a red zone with protected
1401 shadow memory after it and aligning it to at least
1402 ASAN_RED_ZONE_SIZE bytes. */
1403
1404 bool
1405 asan_protect_global (tree decl)
1406 {
1407 if (!ASAN_GLOBALS)
1408 return false;
1409
1410 rtx rtl, symbol;
1411
1412 if (TREE_CODE (decl) == STRING_CST)
1413 {
1414 /* Instrument all STRING_CSTs except those created
1415 by asan_pp_string here. */
1416 if (shadow_ptr_types[0] != NULL_TREE
1417 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1418 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1419 return false;
1420 return true;
1421 }
1422 if (!VAR_P (decl)
1423 /* TLS vars aren't statically protectable. */
1424 || DECL_THREAD_LOCAL_P (decl)
1425 /* Externs will be protected elsewhere. */
1426 || DECL_EXTERNAL (decl)
1427 || !DECL_RTL_SET_P (decl)
1428 /* Comdat vars pose an ABI problem: we can't know if
1429 the var that is selected by the linker will have
1430 padding or not. */
1431 || DECL_ONE_ONLY (decl)
1432 /* Similarly for common vars. People can use -fno-common.
1433 Note: Linux kernel is built with -fno-common, so we do instrument
1434 globals there even if it is C. */
1435 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
1436 /* Don't protect vars placed in a user section: vars placed
1437 into the same user section from multiple TUs are often assumed
1438 to form an array of such vars, and putting padding in there
1439 breaks this assumption.
1440 || (DECL_SECTION_NAME (decl) != NULL
1441 && !symtab_node::get (decl)->implicit_section
1442 && !section_sanitized_p (DECL_SECTION_NAME (decl)))
1443 || DECL_SIZE (decl) == 0
1444 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1445 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1446 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
1447 || TREE_TYPE (decl) == ubsan_get_source_location_type ()
1448 || is_odr_indicator (decl))
1449 return false;
1450
1451 rtl = DECL_RTL (decl);
1452 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1453 return false;
1454 symbol = XEXP (rtl, 0);
1455
1456 if (CONSTANT_POOL_ADDRESS_P (symbol)
1457 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1458 return false;
1459
1460 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1461 return false;
1462
1463 #ifndef ASM_OUTPUT_DEF
1464 if (asan_needs_local_alias (decl))
1465 return false;
1466 #endif
1467
1468 return true;
1469 }
1470
1471 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1472 IS_STORE is either 1 (for a store) or 0 (for a load). */
1473
1474 static tree
1475 report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1476 int *nargs)
1477 {
1478 static enum built_in_function report[2][2][6]
1479 = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1480 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1481 BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1482 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1483 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1484 BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
1485 { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
1486 BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
1487 BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
1488 BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
1489 BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
1490 BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
1491 { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
1492 BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
1493 BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
1494 BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
1495 BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
1496 BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
1497 if (size_in_bytes == -1)
1498 {
1499 *nargs = 2;
1500 return builtin_decl_implicit (report[recover_p][is_store][5]);
1501 }
1502 *nargs = 1;
1503 int size_log2 = exact_log2 (size_in_bytes);
1504 return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
1505 }
1506
1507 /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1508 IS_STORE is either 1 (for a store) or 0 (for a load). */
1509
1510 static tree
1511 check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1512 int *nargs)
1513 {
1514 static enum built_in_function check[2][2][6]
1515 = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
1516 BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
1517 BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
1518 { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
1519 BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
1520 BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
1521 { { BUILT_IN_ASAN_LOAD1_NOABORT,
1522 BUILT_IN_ASAN_LOAD2_NOABORT,
1523 BUILT_IN_ASAN_LOAD4_NOABORT,
1524 BUILT_IN_ASAN_LOAD8_NOABORT,
1525 BUILT_IN_ASAN_LOAD16_NOABORT,
1526 BUILT_IN_ASAN_LOADN_NOABORT },
1527 { BUILT_IN_ASAN_STORE1_NOABORT,
1528 BUILT_IN_ASAN_STORE2_NOABORT,
1529 BUILT_IN_ASAN_STORE4_NOABORT,
1530 BUILT_IN_ASAN_STORE8_NOABORT,
1531 BUILT_IN_ASAN_STORE16_NOABORT,
1532 BUILT_IN_ASAN_STOREN_NOABORT } } };
1533 if (size_in_bytes == -1)
1534 {
1535 *nargs = 2;
1536 return builtin_decl_implicit (check[recover_p][is_store][5]);
1537 }
1538 *nargs = 1;
1539 int size_log2 = exact_log2 (size_in_bytes);
1540 return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
1541 }
1542
1543 /* Split the current basic block and create a condition statement
1544 insertion point right before or after the statement pointed to by
1545 ITER. Return an iterator to the point at which the caller might
1546 safely insert the condition statement.
1547
1548 THEN_BLOCK must be set to the address of an uninitialized instance
1549 of basic_block. The function will then set *THEN_BLOCK to the
1550 'then block' of the condition statement to be inserted by the
1551 caller.
1552
1553 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1554 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1555
1556 Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1557 block' of the condition statement to be inserted by the caller.
1558
1559 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1560 statements starting from *ITER, and *THEN_BLOCK is a new empty
1561 block.
1562
1563 *ITER is adjusted to always point to the first statement
1564 of the basic block *FALLTHROUGH_BLOCK. That statement is the
1565 same as what ITER was pointing to prior to calling this function,
1566 if BEFORE_P is true; otherwise, it is its following statement. */
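/* Schematically, when CREATE_THEN_FALLTHRU_EDGE is true:

         cond_bb  (the caller inserts the condition here)
         /      \
    then_bb      \
         \        \
          +---> fallthru_bb  (*ITER points to its first statement)  */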
1567
1568 gimple_stmt_iterator
1569 create_cond_insert_point (gimple_stmt_iterator *iter,
1570 bool before_p,
1571 bool then_more_likely_p,
1572 bool create_then_fallthru_edge,
1573 basic_block *then_block,
1574 basic_block *fallthrough_block)
1575 {
1576 gimple_stmt_iterator gsi = *iter;
1577
1578 if (!gsi_end_p (gsi) && before_p)
1579 gsi_prev (&gsi);
1580
1581 basic_block cur_bb = gsi_bb (*iter);
1582
1583 edge e = split_block (cur_bb, gsi_stmt (gsi));
1584
1585 /* Get a hold on the 'condition block', the 'then block' and the
1586 'else block'. */
1587 basic_block cond_bb = e->src;
1588 basic_block fallthru_bb = e->dest;
1589 basic_block then_bb = create_empty_bb (cond_bb);
1590 if (current_loops)
1591 {
1592 add_bb_to_loop (then_bb, cond_bb->loop_father);
1593 loops_state_set (LOOPS_NEED_FIXUP);
1594 }
1595
1596 /* Set up the newly created 'then block'. */
1597 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1598 int fallthrough_probability
1599 = then_more_likely_p
1600 ? PROB_VERY_UNLIKELY
1601 : PROB_ALWAYS - PROB_VERY_UNLIKELY;
1602 e->probability = PROB_ALWAYS - fallthrough_probability;
1603 if (create_then_fallthru_edge)
1604 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1605
1606 /* Set up the fallthrough basic block. */
1607 e = find_edge (cond_bb, fallthru_bb);
1608 e->flags = EDGE_FALSE_VALUE;
1609 e->count = cond_bb->count;
1610 e->probability = fallthrough_probability;
1611
1612 /* Update dominance info for the newly created then_bb; note that
1613 fallthru_bb's dominance info has already been updated by
1614 split_block. */
1615 if (dom_info_available_p (CDI_DOMINATORS))
1616 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1617
1618 *then_block = then_bb;
1619 *fallthrough_block = fallthru_bb;
1620 *iter = gsi_start_bb (fallthru_bb);
1621
1622 return gsi_last_bb (cond_bb);
1623 }
1624
1625 /* Insert an if condition followed by a 'then block' right before the
1626 statement pointed to by ITER. The fallthrough block -- which is the
1627 else block of the condition as well as the destination of the
1628 outgoing edge of the 'then block' -- starts with the statement
1629 pointed to by ITER.
1630
1631 COND is the condition of the if.
1632
1633 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1634 'then block' is higher than the probability of the edge to the
1635 fallthrough block.
1636
1637 Upon completion of the function, *THEN_BB is set to the newly
1638 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1639 fallthrough block.
1640
1641 *ITER is adjusted to still point to the same statement it was
1642 pointing to initially. */
1643
1644 static void
1645 insert_if_then_before_iter (gcond *cond,
1646 gimple_stmt_iterator *iter,
1647 bool then_more_likely_p,
1648 basic_block *then_bb,
1649 basic_block *fallthrough_bb)
1650 {
1651 gimple_stmt_iterator cond_insert_point =
1652 create_cond_insert_point (iter,
1653 /*before_p=*/true,
1654 then_more_likely_p,
1655 /*create_then_fallthru_edge=*/true,
1656 then_bb,
1657 fallthrough_bb);
1658 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1659 }
1660
1661 /* Build (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().
1662 If RETURN_ADDRESS is set to true, return the memory location instead
1663 of a value in the shadow memory. */
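/* For a base address _1, this emits roughly (assuming the default
   x86_64 shadow offset and the 1-byte shadow pointer type):
     _2 = _1 >> 3;             (ASAN_SHADOW_SHIFT)
     _3 = _2 + 0x7fff8000;     (asan_shadow_offset ())
     _4 = (signed char *) _3;
     _5 = *_4;
   and returns _5, or _4 when RETURN_ADDRESS is true. */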
1664
1665 static tree
1666 build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
1667 tree base_addr, tree shadow_ptr_type,
1668 bool return_address = false)
1669 {
1670 tree t, uintptr_type = TREE_TYPE (base_addr);
1671 tree shadow_type = TREE_TYPE (shadow_ptr_type);
1672 gimple *g;
1673
1674 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
1675 g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
1676 base_addr, t);
1677 gimple_set_location (g, location);
1678 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1679
1680 t = build_int_cst (uintptr_type, asan_shadow_offset ());
1681 g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
1682 gimple_assign_lhs (g), t);
1683 gimple_set_location (g, location);
1684 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1685
1686 g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
1687 gimple_assign_lhs (g));
1688 gimple_set_location (g, location);
1689 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1690
1691 if (!return_address)
1692 {
1693 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1694 build_int_cst (shadow_ptr_type, 0));
1695 g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
1696 gimple_set_location (g, location);
1697 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1698 }
1699
1700 return gimple_assign_lhs (g);
1701 }
1702
1703 /* BASE can already be an SSA_NAME; in that case, do not create a
1704 new SSA_NAME for it. */
1705
1706 static tree
1707 maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
1708 bool before_p)
1709 {
1710 if (TREE_CODE (base) == SSA_NAME)
1711 return base;
1712 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
1713 TREE_CODE (base), base);
1714 gimple_set_location (g, loc);
1715 if (before_p)
1716 gsi_insert_before (iter, g, GSI_SAME_STMT);
1717 else
1718 gsi_insert_after (iter, g, GSI_NEW_STMT);
1719 return gimple_assign_lhs (g);
1720 }
1721
1722 /* LEN can already have the necessary size and precision;
1723 in that case, do not create a new variable. */
1724
1725 tree
1726 maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
1727 bool before_p)
1728 {
1729 if (ptrofftype_p (len))
1730 return len;
1731 gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1732 NOP_EXPR, len);
1733 gimple_set_location (g, loc);
1734 if (before_p)
1735 gsi_insert_before (iter, g, GSI_SAME_STMT);
1736 else
1737 gsi_insert_after (iter, g, GSI_NEW_STMT);
1738 return gimple_assign_lhs (g);
1739 }
1740
1741 /* Instrument the memory access instruction BASE. Insert new
1742 statements before or after ITER.
1743
1744 Note that the memory access represented by BASE can be either an
1745 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1746 location. IS_STORE is TRUE for a store, FALSE for a load.
1747 BEFORE_P is TRUE for inserting the instrumentation code before
1748 ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
1749 for a scalar memory access and FALSE for memory region access.
1750 IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have
1751 a non-zero length. ALIGN tells the alignment of the accessed
1752 memory object.
1755
1756 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1757 statement it was pointing to prior to calling this function,
1758 otherwise, it points to the statement logically following it. */
1759
1760 static void
1761 build_check_stmt (location_t loc, tree base, tree len,
1762 HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
1763 bool is_non_zero_len, bool before_p, bool is_store,
1764 bool is_scalar_access, unsigned int align = 0)
1765 {
1766 gimple_stmt_iterator gsi = *iter;
1767 gimple *g;
1768
1769 gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
1772
1773 base = unshare_expr (base);
1774 base = maybe_create_ssa_name (loc, base, &gsi, before_p);
1775
1776 if (len)
1777 {
1778 len = unshare_expr (len);
1779 len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
1780 }
1781 else
1782 {
1783 gcc_assert (size_in_bytes != -1);
1784 len = build_int_cst (pointer_sized_int_node, size_in_bytes);
1785 }
1786
1787 if (size_in_bytes > 1)
1788 {
1789 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1790 || size_in_bytes > 16)
1791 is_scalar_access = false;
1792 else if (align && align < size_in_bytes * BITS_PER_UNIT)
1793 {
1794 /* On non-strict alignment targets, if a
1795 16-byte access is only 8-byte aligned,
1796 this results in a misaligned 2-byte
1797 shadow memory load, but otherwise it can
1798 be handled using one read. */
1799 if (size_in_bytes != 16
1800 || STRICT_ALIGNMENT
1801 || align < 8 * BITS_PER_UNIT)
1802 is_scalar_access = false;
1803 }
1804 }
1805
1806 HOST_WIDE_INT flags = 0;
1807 if (is_store)
1808 flags |= ASAN_CHECK_STORE;
1809 if (is_non_zero_len)
1810 flags |= ASAN_CHECK_NON_ZERO_LEN;
1811 if (is_scalar_access)
1812 flags |= ASAN_CHECK_SCALAR_ACCESS;
1813
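  /* E.g. a naturally aligned 4-byte store with known non-zero length
     becomes (assuming the asan_check_flags values from asan.h, where
     ASAN_CHECK_STORE == 1, ASAN_CHECK_SCALAR_ACCESS == 2 and
     ASAN_CHECK_NON_ZERO_LEN == 4) the internal call
       .ASAN_CHECK (7, base, 4, 4);
     which asan_expand_check_ifn later lowers.  */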
1814 g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
1815 build_int_cst (integer_type_node, flags),
1816 base, len,
1817 build_int_cst (integer_type_node,
1818 align / BITS_PER_UNIT));
1819 gimple_set_location (g, loc);
1820 if (before_p)
1821 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1822 else
1823 {
1824 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1825 gsi_next (&gsi);
1826 *iter = gsi;
1827 }
1828 }
1829
1830 /* If T represents a memory access, add instrumentation code before ITER.
1831 LOCATION is source code location.
1832 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
1833
1834 static void
1835 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1836 location_t location, bool is_store)
1837 {
1838 if (is_store && !ASAN_INSTRUMENT_WRITES)
1839 return;
1840 if (!is_store && !ASAN_INSTRUMENT_READS)
1841 return;
1842
1843 tree type, base;
1844 HOST_WIDE_INT size_in_bytes;
1845 if (location == UNKNOWN_LOCATION)
1846 location = EXPR_LOCATION (t);
1847
1848 type = TREE_TYPE (t);
1849 switch (TREE_CODE (t))
1850 {
1851 case ARRAY_REF:
1852 case COMPONENT_REF:
1853 case INDIRECT_REF:
1854 case MEM_REF:
1855 case VAR_DECL:
1856 case BIT_FIELD_REF:
1857 break;
1859 default:
1860 return;
1861 }
1862
1863 size_in_bytes = int_size_in_bytes (type);
1864 if (size_in_bytes <= 0)
1865 return;
1866
1867 HOST_WIDE_INT bitsize, bitpos;
1868 tree offset;
1869 machine_mode mode;
1870 int unsignedp, reversep, volatilep = 0;
1871 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
1872 &unsignedp, &reversep, &volatilep);
1873
1874 if (TREE_CODE (t) == COMPONENT_REF
1875 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1876 {
1877 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1878 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1879 TREE_OPERAND (t, 0), repr,
1880 NULL_TREE), location, is_store);
1881 return;
1882 }
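  /* The recursion above means a bit-field access is checked as an
     access to the representative field covering its whole storage
     unit, so the checked region is byte-aligned and byte-sized.  */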
1883
1884 if (bitpos % BITS_PER_UNIT
1885 || bitsize != size_in_bytes * BITS_PER_UNIT)
1886 return;
1887
1888 if (VAR_P (inner)
1889 && offset == NULL_TREE
1890 && bitpos >= 0
1891 && DECL_SIZE (inner)
1892 && tree_fits_shwi_p (DECL_SIZE (inner))
1893 && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
1894 {
1895 if (DECL_THREAD_LOCAL_P (inner))
1896 return;
1897 if (!ASAN_GLOBALS && is_global_var (inner))
1898 return;
1899 if (!TREE_STATIC (inner))
1900 {
1901 /* Automatic vars in the current function will always be
1902 accessible. */
1903 if (decl_function_context (inner) == current_function_decl
1904 && (!asan_sanitize_use_after_scope ()
1905 || !TREE_ADDRESSABLE (inner)))
1906 return;
1907 }
1908 /* Always instrument external vars, since they might be
1909 dynamically initialized. */
1910 else if (!DECL_EXTERNAL (inner))
1911 {
1912 /* For static vars, if they are known not to be dynamically
1913 initialized, they will always be accessible. */
1914 varpool_node *vnode = varpool_node::get (inner);
1915 if (vnode && !vnode->dynamically_initialized)
1916 return;
1917 }
1918 }
1919
1920 base = build_fold_addr_expr (t);
1921 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1922 {
1923 unsigned int align = get_object_alignment (t);
1924 build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
1925 /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
1926 is_store, /*is_scalar_access*/true, align);
1927 update_mem_ref_hash_table (base, size_in_bytes);
1928 update_mem_ref_hash_table (t, size_in_bytes);
1929 }
1931 }
1932
1933 /* Insert a memory reference into the hash table if access length
1934 can be determined at compile time. */
1935
1936 static void
1937 maybe_update_mem_ref_hash_table (tree base, tree len)
1938 {
1939 if (!POINTER_TYPE_P (TREE_TYPE (base))
1940 || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
1941 return;
1942
1943 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1944
1945 if (size_in_bytes != -1)
1946 update_mem_ref_hash_table (base, size_in_bytes);
1947 }
1948
1949 /* Instrument an access to a contiguous memory region that starts at
1950 the address pointed to by BASE, over a length of LEN (expressed in
1951 units of sizeof (*BASE) bytes). ITER points to the instruction before
1952 which the instrumentation instructions must be inserted. LOCATION
1953 is the source location that the instrumentation instructions must
1954 have. If IS_STORE is true, then the memory access is a store;
1955 otherwise, it's a load. */
1956
1957 static void
1958 instrument_mem_region_access (tree base, tree len,
1959 gimple_stmt_iterator *iter,
1960 location_t location, bool is_store)
1961 {
1962 if (!POINTER_TYPE_P (TREE_TYPE (base))
1963 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1964 || integer_zerop (len))
1965 return;
1966
1967 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1968
1969 if ((size_in_bytes == -1)
1970 || !has_mem_ref_been_instrumented (base, size_in_bytes))
1971 {
1972 build_check_stmt (location, base, len, size_in_bytes, iter,
1973 /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
1974 is_store, /*is_scalar_access*/false, /*align*/0);
1975 }
1976
1977 maybe_update_mem_ref_hash_table (base, len);
1978 *iter = gsi_for_stmt (gsi_stmt (*iter));
1979 }
1980
1981 /* Instrument the call to a built-in memory access function that is
1982 pointed to by the iterator ITER.
1983
1984 Upon completion, return TRUE iff *ITER has been advanced to the
1985 statement following the one it was originally pointing to. */
1986
1987 static bool
1988 instrument_builtin_call (gimple_stmt_iterator *iter)
1989 {
1990 if (!ASAN_MEMINTRIN)
1991 return false;
1992
1993 bool iter_advanced_p = false;
1994 gcall *call = as_a <gcall *> (gsi_stmt (*iter));
1995
1996 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
1997
1998 location_t loc = gimple_location (call);
1999
2000 asan_mem_ref src0, src1, dest;
2001 asan_mem_ref_init (&src0, NULL, 1);
2002 asan_mem_ref_init (&src1, NULL, 1);
2003 asan_mem_ref_init (&dest, NULL, 1);
2004
2005 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
2006 bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
2007 dest_is_deref = false, intercepted_p = true;
2008
2009 if (get_mem_refs_of_builtin_call (call,
2010 &src0, &src0_len, &src0_is_store,
2011 &src1, &src1_len, &src1_is_store,
2012 &dest, &dest_len, &dest_is_store,
2013 &dest_is_deref, &intercepted_p))
2014 {
2015 if (dest_is_deref)
2016 {
2017 instrument_derefs (iter, dest.start, loc, dest_is_store);
2018 gsi_next (iter);
2019 iter_advanced_p = true;
2020 }
2021 else if (!intercepted_p
2022 && (src0_len || src1_len || dest_len))
2023 {
2024 if (src0.start != NULL_TREE)
2025 instrument_mem_region_access (src0.start, src0_len,
2026 iter, loc, /*is_store=*/false);
2027 if (src1.start != NULL_TREE)
2028 instrument_mem_region_access (src1.start, src1_len,
2029 iter, loc, /*is_store=*/false);
2030 if (dest.start != NULL_TREE)
2031 instrument_mem_region_access (dest.start, dest_len,
2032 iter, loc, /*is_store=*/true);
2033
2034 *iter = gsi_for_stmt (call);
2035 gsi_next (iter);
2036 iter_advanced_p = true;
2037 }
2038 else
2039 {
2040 if (src0.start != NULL_TREE)
2041 maybe_update_mem_ref_hash_table (src0.start, src0_len);
2042 if (src1.start != NULL_TREE)
2043 maybe_update_mem_ref_hash_table (src1.start, src1_len);
2044 if (dest.start != NULL_TREE)
2045 maybe_update_mem_ref_hash_table (dest.start, dest_len);
2046 }
2047 }
2048 return iter_advanced_p;
2049 }
2050
2051 /* Instrument the assignment statement ITER if it is subject to
2052 instrumentation. Return TRUE iff instrumentation actually
2053 happened. In that case, the iterator ITER is advanced to the next
2054 logical expression following the one initially pointed to by ITER,
2055 and the relevant memory reference whose access has been
2056 instrumented is added to the memory references hash table. */
2057
2058 static bool
2059 maybe_instrument_assignment (gimple_stmt_iterator *iter)
2060 {
2061 gimple *s = gsi_stmt (*iter);
2062
2063 gcc_assert (gimple_assign_single_p (s));
2064
2065 tree ref_expr = NULL_TREE;
2066 bool is_store, is_instrumented = false;
2067
2068 if (gimple_store_p (s))
2069 {
2070 ref_expr = gimple_assign_lhs (s);
2071 is_store = true;
2072 instrument_derefs (iter, ref_expr,
2073 gimple_location (s),
2074 is_store);
2075 is_instrumented = true;
2076 }
2077
2078 if (gimple_assign_load_p (s))
2079 {
2080 ref_expr = gimple_assign_rhs1 (s);
2081 is_store = false;
2082 instrument_derefs (iter, ref_expr,
2083 gimple_location (s),
2084 is_store);
2085 is_instrumented = true;
2086 }
2087
2088 if (is_instrumented)
2089 gsi_next (iter);
2090
2091 return is_instrumented;
2092 }
2093
2094 /* Instrument the function call pointed to by the iterator ITER, if it
2095 is subject to instrumentation. At the moment, the only function
2096 calls that are instrumented are some built-in functions that access
2097 memory. Look at instrument_builtin_call to learn more.
2098
2099 Upon completion return TRUE iff *ITER was advanced to the statement
2100 following the one it was originally pointing to. */
2101
2102 static bool
2103 maybe_instrument_call (gimple_stmt_iterator *iter)
2104 {
2105 gimple *stmt = gsi_stmt (*iter);
2106 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2107
2108 if (is_builtin && instrument_builtin_call (iter))
2109 return true;
2110
2111 if (gimple_call_noreturn_p (stmt))
2112 {
2113 if (is_builtin)
2114 {
2115 tree callee = gimple_call_fndecl (stmt);
2116 switch (DECL_FUNCTION_CODE (callee))
2117 {
2118 case BUILT_IN_UNREACHABLE:
2119 case BUILT_IN_TRAP:
2120 /* Don't instrument these. */
2121 return false;
2122 default:
2123 break;
2124 }
2125 }
2126 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2127 gimple *g = gimple_build_call (decl, 0);
2128 gimple_set_location (g, gimple_location (stmt));
2129 gsi_insert_before (iter, g, GSI_SAME_STMT);
2130 }
2131
2132 bool instrumented = false;
2133 if (gimple_store_p (stmt))
2134 {
2135 tree ref_expr = gimple_call_lhs (stmt);
2136 instrument_derefs (iter, ref_expr,
2137 gimple_location (stmt),
2138 /*is_store=*/true);
2139
2140 instrumented = true;
2141 }
2142
2143 /* Walk through gimple_call arguments and check them if needed. */
2144 unsigned args_num = gimple_call_num_args (stmt);
2145 for (unsigned i = 0; i < args_num; ++i)
2146 {
2147 tree arg = gimple_call_arg (stmt, i);
2148 /* If ARG is not a non-aggregate register variable, the compiler
2149 generally creates a temporary for it and passes that as the argument
2150 to the gimple call. But in some cases, e.g. when passing by value a
2151 small structure that fits in a register, the compiler can avoid the
2152 extra overhead by omitting the temporary; then we must check ARG. */
2153 if (!is_gimple_reg (arg) && !is_gimple_min_invariant (arg))
2154 {
2155 instrument_derefs (iter, arg,
2156 gimple_location (stmt),
2157 /*is_store=*/false);
2158 instrumented = true;
2159 }
2160 }
2161 if (instrumented)
2162 gsi_next (iter);
2163 return instrumented;
2164 }
2165
2166 /* Walk each instruction of all basic blocks and instrument those that
2167 represent memory references: loads, stores, or function calls.
2168 In a given basic block, this function avoids instrumenting memory
2169 references that have already been instrumented. */
2170
2171 static void
2172 transform_statements (void)
2173 {
2174 basic_block bb, last_bb = NULL;
2175 gimple_stmt_iterator i;
2176 int saved_last_basic_block = last_basic_block_for_fn (cfun);
2177
2178 FOR_EACH_BB_FN (bb, cfun)
2179 {
2180 basic_block prev_bb = bb;
2181
2182 if (bb->index >= saved_last_basic_block) continue;
2183
2184 /* Flush the mem ref hash table, if current bb doesn't have
2185 exactly one predecessor, or if that predecessor (skipping
2186 over asan created basic blocks) isn't the last processed
2187 basic block. Thus we effectively flush on extended basic
2188 block boundaries. */
2189 while (single_pred_p (prev_bb))
2190 {
2191 prev_bb = single_pred (prev_bb);
2192 if (prev_bb->index < saved_last_basic_block)
2193 break;
2194 }
2195 if (prev_bb != last_bb)
2196 empty_mem_ref_hash_table ();
2197 last_bb = bb;
2198
2199 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2200 {
2201 gimple *s = gsi_stmt (i);
2202
2203 if (has_stmt_been_instrumented_p (s))
2204 gsi_next (&i);
2205 else if (gimple_assign_single_p (s)
2206 && !gimple_clobber_p (s)
2207 && maybe_instrument_assignment (&i))
2208 /* Nothing to do as maybe_instrument_assignment advanced
2209 the iterator I. */;
2210 else if (is_gimple_call (s) && maybe_instrument_call (&i))
2211 /* Nothing to do as maybe_instrument_call
2212 advanced the iterator I. */;
2213 else
2214 {
2215 /* No instrumentation happened.
2216
2217 If the current instruction is a function call that
2218 might free something, let's forget about the memory
2219 references that got instrumented. Otherwise we might
2220 miss some instrumentation opportunities. Do the same
2221 for an ASAN_MARK poisoning internal function. */
2222 if (is_gimple_call (s)
2223 && (!nonfreeing_call_p (s)
2224 || asan_mark_p (s, ASAN_MARK_POISON)))
2225 empty_mem_ref_hash_table ();
2226
2227 gsi_next (&i);
2228 }
2229 }
2230 }
2231 free_mem_ref_resources ();
2232 }
2233
2234 /* Build
2235 __asan_before_dynamic_init (module_name)
2236 or
2237 __asan_after_dynamic_init ()
2238 call. */
2239
2240 tree
2241 asan_dynamic_init_call (bool after_p)
2242 {
2243 if (shadow_ptr_types[0] == NULL_TREE)
2244 asan_init_shadow_ptr_types ();
2245
2246 tree fn = builtin_decl_implicit (after_p
2247 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2248 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2249 tree module_name_cst = NULL_TREE;
2250 if (!after_p)
2251 {
2252 pretty_printer module_name_pp;
2253 pp_string (&module_name_pp, main_input_filename);
2254
2255 module_name_cst = asan_pp_string (&module_name_pp);
2256 module_name_cst = fold_convert (const_ptr_type_node,
2257 module_name_cst);
2258 }
2259
2260 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2261 }
2262
2263 /* Build
2264 struct __asan_global
2265 {
2266 const void *__beg;
2267 uptr __size;
2268 uptr __size_with_redzone;
2269 const void *__name;
2270 const void *__module_name;
2271 uptr __has_dynamic_init;
2272 __asan_global_source_location *__location;
2273 char *__odr_indicator;
2274 } type. */
2275
2276 static tree
2277 asan_global_struct (void)
2278 {
2279 static const char *field_names[]
2280 = { "__beg", "__size", "__size_with_redzone",
2281 "__name", "__module_name", "__has_dynamic_init", "__location",
2282 "__odr_indicator" };
2283 tree fields[ARRAY_SIZE (field_names)], ret;
2284 unsigned i;
2285
2286 ret = make_node (RECORD_TYPE);
2287 for (i = 0; i < ARRAY_SIZE (field_names); i++)
2288 {
2289 fields[i]
2290 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2291 get_identifier (field_names[i]),
2292 (i == 0 || i == 3) ? const_ptr_type_node
2293 : pointer_sized_int_node);
2294 DECL_CONTEXT (fields[i]) = ret;
2295 if (i)
2296 DECL_CHAIN (fields[i - 1]) = fields[i];
2297 }
2298 tree type_decl = build_decl (input_location, TYPE_DECL,
2299 get_identifier ("__asan_global"), ret);
2300 DECL_IGNORED_P (type_decl) = 1;
2301 DECL_ARTIFICIAL (type_decl) = 1;
2302 TYPE_FIELDS (ret) = fields[0];
2303 TYPE_NAME (ret) = type_decl;
2304 TYPE_STUB_DECL (ret) = type_decl;
2305 layout_type (ret);
2306 return ret;
2307 }
2308
2309 /* Create and return odr indicator symbol for DECL.
2310 TYPE is __asan_global struct type as returned by asan_global_struct. */
2311
2312 static tree
2313 create_odr_indicator (tree decl, tree type)
2314 {
2315 char *name;
2316 tree uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2317 tree decl_name
2318 = (HAS_DECL_ASSEMBLER_NAME_P (decl) ? DECL_ASSEMBLER_NAME (decl)
2319 : DECL_NAME (decl));
2320 /* DECL_NAME theoretically might be NULL. Bail out with 0 in this case. */
2321 if (decl_name == NULL_TREE)
2322 return build_int_cst (uptr, 0);
2323 size_t len = strlen (IDENTIFIER_POINTER (decl_name)) + sizeof ("__odr_asan_");
2324 name = XALLOCAVEC (char, len);
2325 snprintf (name, len, "__odr_asan_%s", IDENTIFIER_POINTER (decl_name));
2326 #ifndef NO_DOT_IN_LABEL
2327 name[sizeof ("__odr_asan") - 1] = '.';
2328 #elif !defined(NO_DOLLAR_IN_LABEL)
2329 name[sizeof ("__odr_asan") - 1] = '$';
2330 #endif
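  /* So for a global 'foo' this typically yields '__odr_asan.foo'
     (or '__odr_asan$foo' / '__odr_asan_foo', depending on which
     characters the target allows in labels).  */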
2331 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (name),
2332 char_type_node);
2333 TREE_ADDRESSABLE (var) = 1;
2334 TREE_READONLY (var) = 0;
2335 TREE_THIS_VOLATILE (var) = 1;
2336 DECL_GIMPLE_REG_P (var) = 0;
2337 DECL_ARTIFICIAL (var) = 1;
2338 DECL_IGNORED_P (var) = 1;
2339 TREE_STATIC (var) = 1;
2340 TREE_PUBLIC (var) = 1;
2341 DECL_VISIBILITY (var) = DECL_VISIBILITY (decl);
2342 DECL_VISIBILITY_SPECIFIED (var) = DECL_VISIBILITY_SPECIFIED (decl);
2343
2344 TREE_USED (var) = 1;
2345 tree ctor = build_constructor_va (TREE_TYPE (var), 1, NULL_TREE,
2346 build_int_cst (unsigned_type_node, 0));
2347 TREE_CONSTANT (ctor) = 1;
2348 TREE_STATIC (ctor) = 1;
2349 DECL_INITIAL (var) = ctor;
2350 DECL_ATTRIBUTES (var) = tree_cons (get_identifier ("asan odr indicator"),
2351 NULL, DECL_ATTRIBUTES (var));
2352 make_decl_rtl (var);
2353 varpool_node::finalize_decl (var);
2354 return fold_convert (uptr, build_fold_addr_expr (var));
2355 }
2356
2357 /* Return true if DECL, a global var, might be overridden and needs
2358 an additional odr indicator symbol. */
2359
2360 static bool
2361 asan_needs_odr_indicator_p (tree decl)
2362 {
2363 return !DECL_ARTIFICIAL (decl) && !DECL_WEAK (decl) && TREE_PUBLIC (decl);
2364 }
2365
2366 /* Append description of a single global DECL into vector V.
2367 TYPE is __asan_global struct type as returned by asan_global_struct. */
2368
2369 static void
2370 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2371 {
2372 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2373 unsigned HOST_WIDE_INT size;
2374 tree str_cst, module_name_cst, refdecl = decl;
2375 vec<constructor_elt, va_gc> *vinner = NULL;
2376
2377 pretty_printer asan_pp, module_name_pp;
2378
2379 if (DECL_NAME (decl))
2380 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2381 else
2382 pp_string (&asan_pp, "<unknown>");
2383 str_cst = asan_pp_string (&asan_pp);
2384
2385 pp_string (&module_name_pp, main_input_filename);
2386 module_name_cst = asan_pp_string (&module_name_pp);
2387
2388 if (asan_needs_local_alias (decl))
2389 {
2390 char buf[20];
2391 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2392 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2393 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2394 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2395 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2396 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2397 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2398 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2399 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2400 TREE_STATIC (refdecl) = 1;
2401 TREE_PUBLIC (refdecl) = 0;
2402 TREE_USED (refdecl) = 1;
2403 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2404 }
2405
2406 tree odr_indicator_ptr
2407 = (asan_needs_odr_indicator_p (decl) ? create_odr_indicator (decl, type)
2408 : build_int_cst (uptr, 0));
2409 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2410 fold_convert (const_ptr_type_node,
2411 build_fold_addr_expr (refdecl)));
2412 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2413 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2414 size += asan_red_zone_size (size);
2415 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2416 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2417 fold_convert (const_ptr_type_node, str_cst));
2418 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2419 fold_convert (const_ptr_type_node, module_name_cst));
2420 varpool_node *vnode = varpool_node::get (decl);
2421 int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2422 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2423 build_int_cst (uptr, has_dynamic_init));
2424 tree locptr = NULL_TREE;
2425 location_t loc = DECL_SOURCE_LOCATION (decl);
2426 expanded_location xloc = expand_location (loc);
2427 if (xloc.file != NULL)
2428 {
2429 static int lasanloccnt = 0;
2430 char buf[25];
2431 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2432 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2433 ubsan_get_source_location_type ());
2434 TREE_STATIC (var) = 1;
2435 TREE_PUBLIC (var) = 0;
2436 DECL_ARTIFICIAL (var) = 1;
2437 DECL_IGNORED_P (var) = 1;
2438 pretty_printer filename_pp;
2439 pp_string (&filename_pp, xloc.file);
2440 tree str = asan_pp_string (&filename_pp);
2441 tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2442 NULL_TREE, str, NULL_TREE,
2443 build_int_cst (unsigned_type_node,
2444 xloc.line), NULL_TREE,
2445 build_int_cst (unsigned_type_node,
2446 xloc.column));
2447 TREE_CONSTANT (ctor) = 1;
2448 TREE_STATIC (ctor) = 1;
2449 DECL_INITIAL (var) = ctor;
2450 varpool_node::finalize_decl (var);
2451 locptr = fold_convert (uptr, build_fold_addr_expr (var));
2452 }
2453 else
2454 locptr = build_int_cst (uptr, 0);
2455 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2456 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, odr_indicator_ptr);
2457 init = build_constructor (type, vinner);
2458 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2459 }
2460
2461 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2462 void
2463 initialize_sanitizer_builtins (void)
2464 {
2465 tree decl;
2466
2467 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2468 return;
2469
2470 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2471 tree BT_FN_VOID_PTR
2472 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2473 tree BT_FN_VOID_CONST_PTR
2474 = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2475 tree BT_FN_VOID_PTR_PTR
2476 = build_function_type_list (void_type_node, ptr_type_node,
2477 ptr_type_node, NULL_TREE);
2478 tree BT_FN_VOID_PTR_PTR_PTR
2479 = build_function_type_list (void_type_node, ptr_type_node,
2480 ptr_type_node, ptr_type_node, NULL_TREE);
2481 tree BT_FN_VOID_PTR_PTRMODE
2482 = build_function_type_list (void_type_node, ptr_type_node,
2483 pointer_sized_int_node, NULL_TREE);
2484 tree BT_FN_VOID_INT
2485 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2486 tree BT_FN_SIZE_CONST_PTR_INT
2487 = build_function_type_list (size_type_node, const_ptr_type_node,
2488 integer_type_node, NULL_TREE);
2489 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2490 tree BT_FN_IX_CONST_VPTR_INT[5];
2491 tree BT_FN_IX_VPTR_IX_INT[5];
2492 tree BT_FN_VOID_VPTR_IX_INT[5];
2493 tree vptr
2494 = build_pointer_type (build_qualified_type (void_type_node,
2495 TYPE_QUAL_VOLATILE));
2496 tree cvptr
2497 = build_pointer_type (build_qualified_type (void_type_node,
2498 TYPE_QUAL_VOLATILE
2499 |TYPE_QUAL_CONST));
2500 tree boolt
2501 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2502 int i;
2503 for (i = 0; i < 5; i++)
2504 {
2505 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2506 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2507 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2508 integer_type_node, integer_type_node,
2509 NULL_TREE);
2510 BT_FN_IX_CONST_VPTR_INT[i]
2511 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2512 BT_FN_IX_VPTR_IX_INT[i]
2513 = build_function_type_list (ix, vptr, ix, integer_type_node,
2514 NULL_TREE);
2515 BT_FN_VOID_VPTR_IX_INT[i]
2516 = build_function_type_list (void_type_node, vptr, ix,
2517 integer_type_node, NULL_TREE);
2518 }
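  /* E.g. for i == 3, IX is a 64-bit unsigned type, so
     BT_FN_IX_VPTR_IX_INT[3] describes a function taking a volatile
     pointer, a 64-bit value and an int, and returning a 64-bit value;
     sanitizer.def uses these types for builtins such as the
     __tsan_atomic64_* family.  */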
2519 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2520 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2521 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2522 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2523 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2524 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2525 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2526 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2527 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2528 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2529 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2530 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2531 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2532 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2533 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2534 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2535 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2536 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2537 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2538 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2539 #undef ATTR_NOTHROW_LEAF_LIST
2540 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2541 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2542 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2543 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2544 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2545 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2546 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2547 ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
2548 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2549 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2550 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2551 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2552 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2553 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2554 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2555 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2556 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2557 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2558 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2559 /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2560 #undef ATTR_PURE_NOTHROW_LEAF_LIST
2561 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
2562 #undef DEF_BUILTIN_STUB
2563 #define DEF_BUILTIN_STUB(ENUM, NAME)
2564 #undef DEF_SANITIZER_BUILTIN
2565 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2566 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2567 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2568 set_call_expr_flags (decl, ATTRS); \
2569 set_builtin_decl (ENUM, decl, true);
2570
2571 #include "sanitizer.def"
2572
2573 /* -fsanitize=object-size uses __builtin_object_size, but that might
2574 not be available for e.g. Fortran at this point. We use
2575 DEF_SANITIZER_BUILTIN here only as a convenience macro. */
2576 if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
2577 && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
2578 DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
2579 BT_FN_SIZE_CONST_PTR_INT,
2580 ATTR_PURE_NOTHROW_LEAF_LIST)
2581
2582 #undef DEF_SANITIZER_BUILTIN
2583 #undef DEF_BUILTIN_STUB
2584 }
2585
2586 /* Called via hash_table::traverse. Count the number of emitted
2587 STRING_CSTs in the constant hash table. */
2588
2589 int
2590 count_string_csts (constant_descriptor_tree **slot,
2591 unsigned HOST_WIDE_INT *data)
2592 {
2593 struct constant_descriptor_tree *desc = *slot;
2594 if (TREE_CODE (desc->value) == STRING_CST
2595 && TREE_ASM_WRITTEN (desc->value)
2596 && asan_protect_global (desc->value))
2597 ++*data;
2598 return 1;
2599 }
2600
2601 /* Helper structure to pass two parameters to
2602 add_string_csts. */
2603
2604 struct asan_add_string_csts_data
2605 {
2606 tree type;
2607 vec<constructor_elt, va_gc> *v;
2608 };
2609
2610 /* Called via hash_table::traverse. Call asan_add_global
2611 on emitted STRING_CSTs from the constant hash table. */
2612
2613 int
2614 add_string_csts (constant_descriptor_tree **slot,
2615 asan_add_string_csts_data *aascd)
2616 {
2617 struct constant_descriptor_tree *desc = *slot;
2618 if (TREE_CODE (desc->value) == STRING_CST
2619 && TREE_ASM_WRITTEN (desc->value)
2620 && asan_protect_global (desc->value))
2621 {
2622 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2623 aascd->type, aascd->v);
2624 }
2625 return 1;
2626 }
2627
2628 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2629 invoke ggc_collect. */
2630 static GTY(()) tree asan_ctor_statements;
2631
2632 /* Module-level instrumentation.
2633 - Insert __asan_init_vN() into the list of CTORs.
2634 - TODO: insert redzones around globals.
2635 */
2636
2637 void
2638 asan_finish_file (void)
2639 {
2640 varpool_node *vnode;
2641 unsigned HOST_WIDE_INT gcount = 0;
2642
2643 if (shadow_ptr_types[0] == NULL_TREE)
2644 asan_init_shadow_ptr_types ();
2645 /* Avoid instrumenting code in the asan ctors/dtors.
2646 We don't need to insert padding after the description strings,
2647 nor after .LASAN* array. */
2648 flag_sanitize &= ~SANITIZE_ADDRESS;
2649
2650 /* For user-space we want asan constructors to run first.
2651 The Linux kernel does not support priorities other than the default, and the only
2652 other user of constructors is coverage. So we run with the default
2653 priority. */
2654 int priority = flag_sanitize & SANITIZE_USER_ADDRESS
2655 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
2656
2657 if (flag_sanitize & SANITIZE_USER_ADDRESS)
2658 {
2659 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2660 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2661 fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
2662 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2663 }
2664 FOR_EACH_DEFINED_VARIABLE (vnode)
2665 if (TREE_ASM_WRITTEN (vnode->decl)
2666 && asan_protect_global (vnode->decl))
2667 ++gcount;
2668 hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
2669 const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
2670 (&gcount);
2671 if (gcount)
2672 {
2673 tree type = asan_global_struct (), var, ctor;
2674 tree dtor_statements = NULL_TREE;
2675 vec<constructor_elt, va_gc> *v;
2676 char buf[20];
2677
2678 type = build_array_type_nelts (type, gcount);
2679 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2680 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2681 type);
2682 TREE_STATIC (var) = 1;
2683 TREE_PUBLIC (var) = 0;
2684 DECL_ARTIFICIAL (var) = 1;
2685 DECL_IGNORED_P (var) = 1;
2686 vec_alloc (v, gcount);
2687 FOR_EACH_DEFINED_VARIABLE (vnode)
2688 if (TREE_ASM_WRITTEN (vnode->decl)
2689 && asan_protect_global (vnode->decl))
2690 asan_add_global (vnode->decl, TREE_TYPE (type), v);
2691 struct asan_add_string_csts_data aascd;
2692 aascd.type = TREE_TYPE (type);
2693 aascd.v = v;
2694 const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
2695 (&aascd);
2696 ctor = build_constructor (type, v);
2697 TREE_CONSTANT (ctor) = 1;
2698 TREE_STATIC (ctor) = 1;
2699 DECL_INITIAL (var) = ctor;
2700 varpool_node::finalize_decl (var);
2701
2702 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2703 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
2704 append_to_statement_list (build_call_expr (fn, 2,
2705 build_fold_addr_expr (var),
2706 gcount_tree),
2707 &asan_ctor_statements);
2708
2709 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2710 append_to_statement_list (build_call_expr (fn, 2,
2711 build_fold_addr_expr (var),
2712 gcount_tree),
2713 &dtor_statements);
2714 cgraph_build_static_cdtor ('D', dtor_statements, priority);
2715 }
2716 if (asan_ctor_statements)
2717 cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
2718 flag_sanitize |= SANITIZE_ADDRESS;
2719 }
2720
2721 /* Poison or unpoison (depending on the IS_CLOBBER flag) shadow memory based
2722 on the SHADOW address. Newly added statements will be added to ITER with
2723 the given location LOC. We mark SIZE bytes in shadow memory, where
2724 LAST_CHUNK_SIZE is greater than zero when we are at the
2725 end of a variable. */
2726
2727 static void
2728 asan_store_shadow_bytes (gimple_stmt_iterator *iter, location_t loc,
2729 tree shadow,
2730 unsigned HOST_WIDE_INT base_addr_offset,
2731 bool is_clobber, unsigned size,
2732 unsigned last_chunk_size)
2733 {
2734 tree shadow_ptr_type;
2735
2736 switch (size)
2737 {
2738 case 1:
2739 shadow_ptr_type = shadow_ptr_types[0];
2740 break;
2741 case 2:
2742 shadow_ptr_type = shadow_ptr_types[1];
2743 break;
2744 case 4:
2745 shadow_ptr_type = shadow_ptr_types[2];
2746 break;
2747 default:
2748 gcc_unreachable ();
2749 }
2750
2751 unsigned char c = is_clobber ? ASAN_STACK_MAGIC_USE_AFTER_SCOPE : 0;
2752 unsigned HOST_WIDE_INT val = 0;
2753 for (unsigned i = 0; i < size; ++i)
2754 {
2755 unsigned char shadow_c = c;
2756 if (i == size - 1 && last_chunk_size && !is_clobber)
2757 shadow_c = last_chunk_size;
2758 val |= (unsigned HOST_WIDE_INT) shadow_c << (BITS_PER_UNIT * i);
2759 }
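  /* E.g. unpoisoning (IS_CLOBBER == false) with SIZE == 4 and
     LAST_CHUNK_SIZE == 5 packs VAL == 0x05000000, so (on a
     little-endian target) the first three shadow bytes become 0
     (fully addressable) and the last one 5, i.e. only 5 bytes of
     the final granule are addressable.  */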
2760
2761 /* VAL now also encodes the partial last chunk used when unpoisoning. */
2762 tree magic = build_int_cst (TREE_TYPE (shadow_ptr_type), val);
2763
2764 tree dest = build2 (MEM_REF, TREE_TYPE (shadow_ptr_type), shadow,
2765 build_int_cst (shadow_ptr_type, base_addr_offset));
2766
2767 gimple *g = gimple_build_assign (dest, magic);
2768 gimple_set_location (g, loc);
2769 gsi_insert_after (iter, g, GSI_NEW_STMT);
2770 }
2771
2772 /* Expand the ASAN_MARK builtins. */
2773
2774 bool
2775 asan_expand_mark_ifn (gimple_stmt_iterator *iter)
2776 {
2777 gimple *g = gsi_stmt (*iter);
2778 location_t loc = gimple_location (g);
2779 HOST_WIDE_INT flag = tree_to_shwi (gimple_call_arg (g, 0));
2780 bool is_poison = ((asan_mark_flags)flag) == ASAN_MARK_POISON;
2781
2782 tree base = gimple_call_arg (g, 1);
2783 gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
2784 tree decl = TREE_OPERAND (base, 0);
2785
2786 /* For a nested function, we can have: ASAN_MARK (2, &FRAME.2.fp_input, 4) */
2787 if (TREE_CODE (decl) == COMPONENT_REF
2788 && DECL_NONLOCAL_FRAME (TREE_OPERAND (decl, 0)))
2789 decl = TREE_OPERAND (decl, 0);
2790
2791 gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
2792 if (asan_handled_variables == NULL)
2793 asan_handled_variables = new hash_set<tree> (16);
2794 asan_handled_variables->add (decl);
2795 tree len = gimple_call_arg (g, 2);
2796
2797 gcc_assert (tree_fits_shwi_p (len));
2798 unsigned HOST_WIDE_INT size_in_bytes = tree_to_shwi (len);
2799 gcc_assert (size_in_bytes);
2800
2801 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2802 NOP_EXPR, base);
2803 gimple_set_location (g, loc);
2804 gsi_replace (iter, g, false);
2805 tree base_addr = gimple_assign_lhs (g);
2806
2807 /* Emit the shadow memory stores directly if size_in_bytes is small. */
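  /* Worked example (assuming ASAN_SHADOW_GRANULARITY == 8):
     unpoisoning an 11-byte variable needs shadow_mem_size (11) == 2
     shadow bytes, so the loop below emits a single 2-byte store
     writing 0 to the first shadow byte and 3 (== 11 % 8) to the
     second (on a little-endian target).  */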
2808 if (size_in_bytes <= ASAN_PARAM_USE_AFTER_SCOPE_DIRECT_EMISSION_THRESHOLD)
2809 {
2810 unsigned HOST_WIDE_INT shadow_size = shadow_mem_size (size_in_bytes);
2811
2812 tree shadow = build_shadow_mem_access (iter, loc, base_addr,
2813 shadow_ptr_types[0], true);
2814
2815 for (unsigned HOST_WIDE_INT offset = 0; offset < shadow_size;)
2816 {
2817 unsigned size = 1;
2818 if (shadow_size - offset >= 4)
2819 size = 4;
2820 else if (shadow_size - offset >= 2)
2821 size = 2;
2822
2823 unsigned HOST_WIDE_INT last_chunk_size = 0;
2824 unsigned HOST_WIDE_INT s = (offset + size) * ASAN_SHADOW_GRANULARITY;
2825 if (s > size_in_bytes)
2826 last_chunk_size = ASAN_SHADOW_GRANULARITY - (s - size_in_bytes);
2827
2828 asan_store_shadow_bytes (iter, loc, shadow, offset, is_poison,
2829 size, last_chunk_size);
2830 offset += size;
2831 }
2832 }
2833 else
2834 {
2835 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2836 NOP_EXPR, len);
2837 gimple_set_location (g, loc);
2838 gsi_insert_before (iter, g, GSI_SAME_STMT);
2839 tree sz_arg = gimple_assign_lhs (g);
2840
2841 tree fun = builtin_decl_implicit (is_poison ? BUILT_IN_ASAN_CLOBBER_N
2842 : BUILT_IN_ASAN_UNCLOBBER_N);
2843 g = gimple_build_call (fun, 2, base_addr, sz_arg);
2844 gimple_set_location (g, loc);
2845 gsi_insert_after (iter, g, GSI_NEW_STMT);
2846 }
2847
2848 return false;
2849 }
2850
2851 /* Expand the ASAN_{LOAD,STORE} builtins. */
2852
2853 bool
2854 asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
2855 {
2856 gimple *g = gsi_stmt (*iter);
2857 location_t loc = gimple_location (g);
2858 bool recover_p;
2859 if (flag_sanitize & SANITIZE_USER_ADDRESS)
2860 recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
2861 else
2862 recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
2863
2864 HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
2865 gcc_assert (flags < ASAN_CHECK_LAST);
2866 bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
2867 bool is_store = (flags & ASAN_CHECK_STORE) != 0;
2868 bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
2869
2870 tree base = gimple_call_arg (g, 1);
2871 tree len = gimple_call_arg (g, 2);
2872 HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
2873
2874 HOST_WIDE_INT size_in_bytes
2875 = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2876
2877 if (use_calls)
2878 {
2879 /* Instrument using callbacks. */
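      /* E.g. a known 4-byte load becomes a call to __asan_load4 (or
	 __asan_load4_noabort when recovering), while an access of
	 unknown length uses the two-argument __asan_loadN variant.  */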
2880 gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2881 NOP_EXPR, base);
2882 gimple_set_location (g, loc);
2883 gsi_insert_before (iter, g, GSI_SAME_STMT);
2884 tree base_addr = gimple_assign_lhs (g);
2885
2886 int nargs;
2887 tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
2888 if (nargs == 1)
2889 g = gimple_build_call (fun, 1, base_addr);
2890 else
2891 {
2892 gcc_assert (nargs == 2);
2893 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2894 NOP_EXPR, len);
2895 gimple_set_location (g, loc);
2896 gsi_insert_before (iter, g, GSI_SAME_STMT);
2897 tree sz_arg = gimple_assign_lhs (g);
2898 g = gimple_build_call (fun, nargs, base_addr, sz_arg);
2899 }
2900 gimple_set_location (g, loc);
2901 gsi_replace (iter, g, false);
2902 return false;
2903 }
2904
2905 HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
2906
2907 tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
2908 tree shadow_type = TREE_TYPE (shadow_ptr_type);
2909
2910 gimple_stmt_iterator gsi = *iter;
2911
2912 if (!is_non_zero_len)
2913 {
2914 /* So, the length of the memory area to asan-protect is not
2915 known to be non-zero. Let's guard the generated instrumentation code
2916 like:
2917
2918 if (len != 0)
2919 {
2920 //asan instrumentation code goes here.
2921 }
2922 // fallthrough instructions, starting with *ITER. */
2923
2924 g = gimple_build_cond (NE_EXPR,
2925 len,
2926 build_int_cst (TREE_TYPE (len), 0),
2927 NULL_TREE, NULL_TREE);
2928 gimple_set_location (g, loc);
2929
2930 basic_block then_bb, fallthrough_bb;
2931 insert_if_then_before_iter (as_a <gcond *> (g), iter,
2932 /*then_more_likely_p=*/true,
2933 &then_bb, &fallthrough_bb);
2934 /* Note that fallthrough_bb starts with the statement that was
2935 pointed to by ITER. */
2936
2937 /* The 'then block' of the 'if (len != 0)' condition is where
2938 we'll generate the asan instrumentation code now. */
2939 gsi = gsi_last_bb (then_bb);
2940 }
2941
2942 /* Get an iterator on the point where we can add the condition
2943 statement for the instrumentation. */
2944 basic_block then_bb, else_bb;
2945 gsi = create_cond_insert_point (&gsi, /*before_p*/false,
2946 /*then_more_likely_p=*/false,
2947 /*create_then_fallthru_edge*/recover_p,
2948 &then_bb,
2949 &else_bb);
2950
2951 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2952 NOP_EXPR, base);
2953 gimple_set_location (g, loc);
2954 gsi_insert_before (&gsi, g, GSI_NEW_STMT);
2955 tree base_addr = gimple_assign_lhs (g);
2956
2957 tree t = NULL_TREE;
2958 if (real_size_in_bytes >= 8)
2959 {
2960 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2961 shadow_ptr_type);
2962 t = shadow;
2963 }
2964 else
2965 {
2966 /* Slow path for 1, 2 and 4 byte accesses. */
2967 /* Test (shadow != 0)
2968 & (((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow). */
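      /* For instance, with an 8-byte shadow granule: a 1-byte load at
	 base_addr & 7 == 6 against a shadow value of 7 (first 7 bytes
	 of the granule addressable) passes, since 6 + 0 < 7, while the
	 same load at base_addr & 7 == 7 is reported, since 7 + 0 >= 7.  */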
2969 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2970 shadow_ptr_type);
2971 gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
2972 gimple_seq seq = NULL;
2973 gimple_seq_add_stmt (&seq, shadow_test);
2974 /* Accesses aligned to >= 8 bytes can test just
2975 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
2976 to be 0. */
2977 if (align < 8)
2978 {
2979 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2980 base_addr, 7));
2981 gimple_seq_add_stmt (&seq,
2982 build_type_cast (shadow_type,
2983 gimple_seq_last (seq)));
2984 if (real_size_in_bytes > 1)
2985 gimple_seq_add_stmt (&seq,
2986 build_assign (PLUS_EXPR,
2987 gimple_seq_last (seq),
2988 real_size_in_bytes - 1));
2989 t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
2990 }
2991 else
2992 t = build_int_cst (shadow_type, real_size_in_bytes - 1);
2993 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
2994 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2995 gimple_seq_last (seq)));
2996 t = gimple_assign_lhs (gimple_seq_last (seq));
2997 gimple_seq_set_location (seq, loc);
2998 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2999
3000 /* For non-constant, misaligned or otherwise weird access sizes,
3001 check first and last byte. */
3002 if (size_in_bytes == -1)
3003 {
3004 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3005 MINUS_EXPR, len,
3006 build_int_cst (pointer_sized_int_node, 1));
3007 gimple_set_location (g, loc);
3008 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3009 tree last = gimple_assign_lhs (g);
3010 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3011 PLUS_EXPR, base_addr, last);
3012 gimple_set_location (g, loc);
3013 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3014 tree base_end_addr = gimple_assign_lhs (g);
3015
3016 tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
3017 shadow_ptr_type);
3018 gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
3019 gimple_seq seq = NULL;
3020 gimple_seq_add_stmt (&seq, shadow_test);
3021 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
3022 base_end_addr, 7));
3023 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
3024 gimple_seq_last (seq)));
3025 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
3026 gimple_seq_last (seq),
3027 shadow));
3028 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
3029 gimple_seq_last (seq)));
3030 gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
3031 gimple_seq_last (seq)));
3032 t = gimple_assign_lhs (gimple_seq_last (seq));
3033 gimple_seq_set_location (seq, loc);
3034 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3035 }
3036 }
3037
3038 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
3039 NULL_TREE, NULL_TREE);
3040 gimple_set_location (g, loc);
3041 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3042
3043 /* Generate call to the run-time library (e.g. __asan_report_load8). */
3044 gsi = gsi_start_bb (then_bb);
3045 int nargs;
3046 tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
3047 g = gimple_build_call (fun, nargs, base_addr, len);
3048 gimple_set_location (g, loc);
3049 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3050
3051 gsi_remove (iter, true);
3052 *iter = gsi_start_bb (else_bb);
3053
3054 return true;
3055 }
3056
3057 /* Instrument the current function. */
3058
3059 static unsigned int
3060 asan_instrument (void)
3061 {
3062 if (shadow_ptr_types[0] == NULL_TREE)
3063 asan_init_shadow_ptr_types ();
3064 transform_statements ();
3065 return 0;
3066 }
3067
3068 static bool
3069 gate_asan (void)
3070 {
3071 return (flag_sanitize & SANITIZE_ADDRESS) != 0
3072 && !lookup_attribute ("no_sanitize_address",
3073 DECL_ATTRIBUTES (current_function_decl));
3074 }
3075
3076 namespace {
3077
3078 const pass_data pass_data_asan =
3079 {
3080 GIMPLE_PASS, /* type */
3081 "asan", /* name */
3082 OPTGROUP_NONE, /* optinfo_flags */
3083 TV_NONE, /* tv_id */
3084 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
3085 0, /* properties_provided */
3086 0, /* properties_destroyed */
3087 0, /* todo_flags_start */
3088 TODO_update_ssa, /* todo_flags_finish */
3089 };
3090
3091 class pass_asan : public gimple_opt_pass
3092 {
3093 public:
3094 pass_asan (gcc::context *ctxt)
3095 : gimple_opt_pass (pass_data_asan, ctxt)
3096 {}
3097
3098 /* opt_pass methods: */
3099 opt_pass * clone () { return new pass_asan (m_ctxt); }
3100 virtual bool gate (function *) { return gate_asan (); }
3101 virtual unsigned int execute (function *) { return asan_instrument (); }
3102
3103 }; // class pass_asan
3104
3105 } // anon namespace
3106
3107 gimple_opt_pass *
3108 make_pass_asan (gcc::context *ctxt)
3109 {
3110 return new pass_asan (ctxt);
3111 }
3112
3113 namespace {
3114
3115 const pass_data pass_data_asan_O0 =
3116 {
3117 GIMPLE_PASS, /* type */
3118 "asan0", /* name */
3119 OPTGROUP_NONE, /* optinfo_flags */
3120 TV_NONE, /* tv_id */
3121 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
3122 0, /* properties_provided */
3123 0, /* properties_destroyed */
3124 0, /* todo_flags_start */
3125 TODO_update_ssa, /* todo_flags_finish */
3126 };
3127
3128 class pass_asan_O0 : public gimple_opt_pass
3129 {
3130 public:
3131 pass_asan_O0 (gcc::context *ctxt)
3132 : gimple_opt_pass (pass_data_asan_O0, ctxt)
3133 {}
3134
3135 /* opt_pass methods: */
3136 virtual bool gate (function *) { return !optimize && gate_asan (); }
3137 virtual unsigned int execute (function *) { return asan_instrument (); }
3138
3139 }; // class pass_asan_O0
3140
3141 } // anon namespace
3142
3143 gimple_opt_pass *
3144 make_pass_asan_O0 (gcc::context *ctxt)
3145 {
3146 return new pass_asan_O0 (ctxt);
3147 }
3148
3149 #include "gt-asan.h"