gcc/asan.c
1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2015 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "cfghooks.h"
31 #include "alloc-pool.h"
32 #include "tree-pass.h"
33 #include "tm_p.h"
34 #include "stringpool.h"
35 #include "tree-ssanames.h"
36 #include "expmed.h"
37 #include "optabs.h"
38 #include "emit-rtl.h"
39 #include "cgraph.h"
40 #include "gimple-pretty-print.h"
41 #include "alias.h"
42 #include "fold-const.h"
43 #include "cfganal.h"
44 #include "internal-fn.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "stor-layout.h"
50 #include "tree-iterator.h"
51 #include "asan.h"
52 #include "flags.h"
53 #include "dojump.h"
54 #include "explow.h"
55 #include "stmt.h"
56 #include "expr.h"
57 #include "output.h"
58 #include "langhooks.h"
59 #include "cfgloop.h"
60 #include "gimple-builder.h"
61 #include "ubsan.h"
62 #include "params.h"
63 #include "builtins.h"
64 #include "fnmatch.h"
65
66 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
67 with <2x slowdown on average.
68
69 The tool consists of two parts:
70 instrumentation module (this file) and a run-time library.
71 The instrumentation module adds a run-time check before every memory insn.
72 For an 8- or 16-byte load accessing address X:
73 ShadowAddr = (X >> 3) + Offset
74 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
75 if (ShadowValue)
76 __asan_report_load8(X);
77 For a load of N bytes (N=1, 2 or 4) from address X:
78 ShadowAddr = (X >> 3) + Offset
79 ShadowValue = *(char*)ShadowAddr;
80 if (ShadowValue)
81 if ((X & 7) + N - 1 > ShadowValue)
82 __asan_report_loadN(X);
83 Stores are instrumented similarly, but using __asan_report_storeN functions.
84 A call to __asan_init_vN() is inserted into the list of module CTORs.
85 N is the version number of the AddressSanitizer API. The changes between the
86 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
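
   As a concrete illustration (the address and Offset below are example
   values only; the real Offset is target-specific and is what
   targetm.asan_shadow_offset returns, e.g. 0x7fff8000 on x86_64/Linux),
   a 4-byte load from X = 0x7fffffffe93c is checked roughly as:
     ShadowAddr = (0x7fffffffe93c >> 3) + 0x7fff8000 // = 0x10007fff7d27
     ShadowValue = *(char*)ShadowAddr;
     if (ShadowValue) // 0 means the whole 8-byte granule is addressable
       if ((0x7fffffffe93c & 7) + 4 - 1 > ShadowValue) // i.e. 7 > ShadowValue
         __asan_report_load4(0x7fffffffe93c);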
87
88 The run-time library redefines malloc (so that redzones are inserted around
89 the allocated memory) and free (so that reuse of freed memory is delayed),
90 provides __asan_report* and __asan_init_vN functions.
91
92 Read more:
93 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
94
95 The current implementation supports detection of out-of-bounds and
96 use-after-free in the heap, on the stack and for global variables.
97
98 [Protection of stack variables]
99
100 To understand how detection of out-of-bounds and use-after-free works
101 for stack variables, let's look at this example on x86_64 where the
102 stack grows downward:
103
104 int
105 foo ()
106 {
107 char a[23] = {0};
108 int b[2] = {0};
109
110 a[5] = 1;
111 b[1] = 2;
112
113 return a[5] + b[1];
114 }
115
116 For this function, the stack protected by asan will be organized as
117 follows, from the top of the stack to the bottom:
118
119 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
120
121 Slot 2/ [8 bytes of red zone, added to the space of 'a' to make
122 the next slot 32-byte aligned; this one is called Partial
123 Redzone; this 32-byte alignment is an asan constraint]
124
125 Slot 3/ [24 bytes for variable 'a']
126
127 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
128
129 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
130
131 Slot 6/ [8 bytes for variable 'b']
132
133 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
134 'LEFT RedZone']
135
136 The 32 bytes of LEFT red zone at the bottom of the stack can be
137 decomposed as such:
138
139 1/ The first 8 bytes contain a magical asan number that is always
140 0x41B58AB3.
141
142 2/ The following 8 bytes contain a pointer to a string (to be
143 parsed at runtime by the asan run-time library), whose format is
144 the following:
145
146 "<function-name> <space> <num-of-variables-on-the-stack>
147 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
148 <length-of-var-in-bytes> ){n} "
149
150 where '(...){n}' means the content inside the parentheses occurs 'n'
151 times, with 'n' being the number of variables on the stack.
152
153 3/ The following 8 bytes contain the PC of the current function which
154 will be used by the run-time library to print an error message.
155
156 4/ The following 8 bytes are reserved for internal use by the run-time.
157
158 The shadow memory for that stack layout is going to look like this:
159
160 - content of the shadow memory for slot 7: 0xF1F1F1F1.
161 The F1 byte pattern is a magic number called
162 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
163 the memory for that shadow byte is part of the LEFT red zone
164 intended to sit at the bottom of the variables on the stack.
165
166 - content of the shadow memory for slots 6 and 5:
167 0xF4F4F400. The F4 byte pattern is a magic number
168 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
169 memory region for this shadow byte is a PARTIAL red zone
170 intended to pad a variable A, so that the slot following
171 {A,padding} is 32 bytes aligned.
172
173 Note that the fact that the least significant byte of this
174 shadow memory content is 00 means that 8 bytes of its
175 corresponding memory (which corresponds to the memory of
176 variable 'b') is addressable.
177
178 - content of the shadow memory for slot 4: 0xF2F2F2F2.
179 The F2 byte pattern is a magic number called
180 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
181 region for this shadow byte is a MIDDLE red zone intended to
182 sit between two 32-byte aligned slots of {variable,padding}.
183
184 - content of the shadow memory for slots 3 and 2:
185 0xF4000000. This is the concatenation of
186 variable 'a' and the partial red zone following it, like what we
187 had for variable 'b'. The least significant 3 bytes being 00
188 means that the 24 bytes of the slot holding variable 'a' are addressable.
189
190 - content of the shadow memory for slot 1: 0xF3F3F3F3.
191 The F3 byte pattern is a magic number called
192 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
193 region for this shadow byte is a RIGHT red zone intended to sit
194 at the top of the variables of the stack.
195
196 Note that the real variable layout is done in expand_used_vars in
197 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
198 stack variables as well as the different red zones, emits some
199 prologue code to populate the shadow memory so as to poison (mark as
200 non-accessible) the regions of the red zones and mark the regions of
201 stack variables as accessible, and emits some epilogue code to
202 un-poison (mark as accessible) the regions of red zones right before
203 the function exits.
204
205 [Protection of global variables]
206
207 The basic idea is to insert a red zone between two global variables
208 and install a constructor function that calls the asan runtime to do
209 the populating of the relevant shadow memory regions at load time.
210
211 So the global variables are laid out so as to insert a red zone between
212 them. The size of the red zones is chosen so that each variable starts on a
213 32-byte boundary.
214
215 Then a constructor function is installed so that, for each global
216 variable, it calls the runtime asan library function
217 __asan_register_globals with an instance of this type:
218
219 struct __asan_global
220 {
221 // Address of the beginning of the global variable.
222 const void *__beg;
223
224 // Initial size of the global variable.
225 uptr __size;
226
227 // Size of the global variable + size of the red zone. This
228 // size is 32 bytes aligned.
229 uptr __size_with_redzone;
230
231 // Name of the global variable.
232 const void *__name;
233
234 // Name of the module where the global variable is declared.
235 const void *__module_name;
236
237 // 1 if it has dynamic initialization, 0 otherwise.
238 uptr __has_dynamic_init;
239
240 // A pointer to struct that contains source location, could be NULL.
241 __asan_global_source_location *__location;
242 }
243
244 A destructor function that calls the runtime asan library function
245 __asan_unregister_globals is also installed. */
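
/* Purely for illustration (this is a sketch, not GCC's actual emitted
   code, and the names below are made up): the constructor/destructor pair
   registered for a translation unit behaves like

     static struct __asan_global ctor_globals[N];

     static void module_ctor (void)
     {
       __asan_init_vN ();
       __asan_register_globals (ctor_globals, N);
     }

     static void module_dtor (void)
     {
       __asan_unregister_globals (ctor_globals, N);
     }

   with one ctor_globals entry filled in per protected global.  */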
246
247 static unsigned HOST_WIDE_INT asan_shadow_offset_value;
248 static bool asan_shadow_offset_computed;
249 static vec<char *> sanitized_sections;
250
251 /* Sets shadow offset to value in string VAL. */
252
253 bool
254 set_asan_shadow_offset (const char *val)
255 {
256 char *endp;
257
258 errno = 0;
259 #ifdef HAVE_LONG_LONG
260 asan_shadow_offset_value = strtoull (val, &endp, 0);
261 #else
262 asan_shadow_offset_value = strtoul (val, &endp, 0);
263 #endif
264 if (!(*val != '\0' && *endp == '\0' && errno == 0))
265 return false;
266
267 asan_shadow_offset_computed = true;
268
269 return true;
270 }
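
/* A usage sketch (the option spelling is an assumption about the caller,
   e.g. -fasan-shadow-offset=0x7fff8000): the value is parsed with base 0,
   so hexadecimal, octal and decimal spellings are all accepted, and an
   empty string or trailing garbage makes the function return false.  */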
271
272 /* Set list of user-defined sections that need to be sanitized. */
273
274 void
275 set_sanitized_sections (const char *sections)
276 {
277 char *pat;
278 unsigned i;
279 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
280 free (pat);
281 sanitized_sections.truncate (0);
282
283 for (const char *s = sections; *s; )
284 {
285 const char *end;
286 for (end = s; *end && *end != ','; ++end);
287 size_t len = end - s;
288 sanitized_sections.safe_push (xstrndup (s, len));
289 s = *end ? end + 1 : end;
290 }
291 }
292
293 /* Checks whether section SEC should be sanitized. */
294
295 static bool
296 section_sanitized_p (const char *sec)
297 {
298 char *pat;
299 unsigned i;
300 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
301 if (fnmatch (pat, sec, FNM_PERIOD) == 0)
302 return true;
303 return false;
304 }
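
/* Usage sketch (assuming these helpers back the -fsanitize-sections=
   option): the argument is a comma-separated list of fnmatch glob
   patterns, so e.g. "-fsanitize-sections=.mysec*,.data.foo" makes
   section_sanitized_p (".mysec1") return true while ".text" remains
   unmatched.  */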
305
306 /* Returns Asan shadow offset. */
307
308 static unsigned HOST_WIDE_INT
309 asan_shadow_offset ()
310 {
311 if (!asan_shadow_offset_computed)
312 {
313 asan_shadow_offset_computed = true;
314 asan_shadow_offset_value = targetm.asan_shadow_offset ();
315 }
316 return asan_shadow_offset_value;
317 }
318
319 alias_set_type asan_shadow_set = -1;
320
321 /* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
322 alias set is used for all shadow memory accesses. */
323 static GTY(()) tree shadow_ptr_types[2];
324
325 /* Decl for __asan_option_detect_stack_use_after_return. */
326 static GTY(()) tree asan_detect_stack_use_after_return;
327
328 /* Various flags for Asan builtins. */
329 enum asan_check_flags
330 {
331 ASAN_CHECK_STORE = 1 << 0,
332 ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
333 ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
334 ASAN_CHECK_LAST = 1 << 3
335 };
336
337 /* Hashtable support for memory references used by gimple
338 statements. */
339
340 /* This type represents a reference to a memory region. */
341 struct asan_mem_ref
342 {
343 /* The expression of the beginning of the memory region. */
344 tree start;
345
346 /* The size of the access. */
347 HOST_WIDE_INT access_size;
348 };
349
350 object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");
351
352 /* Initializes an instance of asan_mem_ref. */
353
354 static void
355 asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
356 {
357 ref->start = start;
358 ref->access_size = access_size;
359 }
360
361 /* Allocates memory for an instance of asan_mem_ref from the memory
362 pool asan_mem_ref_pool and initializes it.
363 START is the address of (or the expression pointing to) the
364 beginning of memory reference. ACCESS_SIZE is the size of the
365 access to the referenced memory. */
366
367 static asan_mem_ref*
368 asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
369 {
370 asan_mem_ref *ref = asan_mem_ref_pool.allocate ();
371
372 asan_mem_ref_init (ref, start, access_size);
373 return ref;
374 }
375
376 /* This builds and returns a pointer to the end of the memory region
377 that starts at START and has length LEN. */
378
379 tree
380 asan_mem_ref_get_end (tree start, tree len)
381 {
382 if (len == NULL_TREE || integer_zerop (len))
383 return start;
384
385 if (!ptrofftype_p (len))
386 len = convert_to_ptrofftype (len);
387
388 return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
389 }
390
391 /* Return a tree expression that represents the end of the referenced
392 memory region. Beware that this function can actually build a new
393 tree expression. */
394
395 tree
396 asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
397 {
398 return asan_mem_ref_get_end (ref->start, len);
399 }
400
401 struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
402 {
403 static inline hashval_t hash (const asan_mem_ref *);
404 static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
405 };
406
407 /* Hash a memory reference. */
408
409 inline hashval_t
410 asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
411 {
412 return iterative_hash_expr (mem_ref->start, 0);
413 }
414
415 /* Compare two memory references. We accept the length of either
416 memory reference to be NULL_TREE. */
417
418 inline bool
419 asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
420 const asan_mem_ref *m2)
421 {
422 return operand_equal_p (m1->start, m2->start, 0);
423 }
424
425 static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
426
427 /* Returns a reference to the hash table containing memory references.
428 This function ensures that the hash table is created. Note that
429 this hash table is updated by the function
430 update_mem_ref_hash_table. */
431
432 static hash_table<asan_mem_ref_hasher> *
433 get_mem_ref_hash_table ()
434 {
435 if (!asan_mem_ref_ht)
436 asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
437
438 return asan_mem_ref_ht;
439 }
440
441 /* Clear all entries from the memory references hash table. */
442
443 static void
444 empty_mem_ref_hash_table ()
445 {
446 if (asan_mem_ref_ht)
447 asan_mem_ref_ht->empty ();
448 }
449
450 /* Free the memory references hash table. */
451
452 static void
453 free_mem_ref_resources ()
454 {
455 delete asan_mem_ref_ht;
456 asan_mem_ref_ht = NULL;
457
458 asan_mem_ref_pool.release ();
459 }
460
461 /* Return true iff the memory reference REF has been instrumented. */
462
463 static bool
464 has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
465 {
466 asan_mem_ref r;
467 asan_mem_ref_init (&r, ref, access_size);
468
469 asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
470 return saved_ref && saved_ref->access_size >= access_size;
471 }
472
473 /* Return true iff the memory reference REF has been instrumented. */
474
475 static bool
476 has_mem_ref_been_instrumented (const asan_mem_ref *ref)
477 {
478 return has_mem_ref_been_instrumented (ref->start, ref->access_size);
479 }
480
481 /* Return true iff access to memory region starting at REF and of
482 length LEN has been instrumented. */
483
484 static bool
485 has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
486 {
487 HOST_WIDE_INT size_in_bytes
488 = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
489
490 return size_in_bytes != -1
491 && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
492 }
493
494 /* Set REF to the memory reference present in a gimple assignment
495 ASSIGNMENT. Return true upon successful completion, false
496 otherwise. */
497
498 static bool
499 get_mem_ref_of_assignment (const gassign *assignment,
500 asan_mem_ref *ref,
501 bool *ref_is_store)
502 {
503 gcc_assert (gimple_assign_single_p (assignment));
504
505 if (gimple_store_p (assignment)
506 && !gimple_clobber_p (assignment))
507 {
508 ref->start = gimple_assign_lhs (assignment);
509 *ref_is_store = true;
510 }
511 else if (gimple_assign_load_p (assignment))
512 {
513 ref->start = gimple_assign_rhs1 (assignment);
514 *ref_is_store = false;
515 }
516 else
517 return false;
518
519 ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
520 return true;
521 }
522
523 /* Return the memory references contained in a gimple statement
524 representing a builtin call that has to do with memory access. */
525
526 static bool
527 get_mem_refs_of_builtin_call (const gcall *call,
528 asan_mem_ref *src0,
529 tree *src0_len,
530 bool *src0_is_store,
531 asan_mem_ref *src1,
532 tree *src1_len,
533 bool *src1_is_store,
534 asan_mem_ref *dst,
535 tree *dst_len,
536 bool *dst_is_store,
537 bool *dest_is_deref,
538 bool *intercepted_p)
539 {
540 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
541
542 tree callee = gimple_call_fndecl (call);
543 tree source0 = NULL_TREE, source1 = NULL_TREE,
544 dest = NULL_TREE, len = NULL_TREE;
545 bool is_store = true, got_reference_p = false;
546 HOST_WIDE_INT access_size = 1;
547
548 *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));
549
550 switch (DECL_FUNCTION_CODE (callee))
551 {
552 /* (s, s, n) style memops. */
553 case BUILT_IN_BCMP:
554 case BUILT_IN_MEMCMP:
555 source0 = gimple_call_arg (call, 0);
556 source1 = gimple_call_arg (call, 1);
557 len = gimple_call_arg (call, 2);
558 break;
559
560 /* (src, dest, n) style memops. */
561 case BUILT_IN_BCOPY:
562 source0 = gimple_call_arg (call, 0);
563 dest = gimple_call_arg (call, 1);
564 len = gimple_call_arg (call, 2);
565 break;
566
567 /* (dest, src, n) style memops. */
568 case BUILT_IN_MEMCPY:
569 case BUILT_IN_MEMCPY_CHK:
570 case BUILT_IN_MEMMOVE:
571 case BUILT_IN_MEMMOVE_CHK:
572 case BUILT_IN_MEMPCPY:
573 case BUILT_IN_MEMPCPY_CHK:
574 dest = gimple_call_arg (call, 0);
575 source0 = gimple_call_arg (call, 1);
576 len = gimple_call_arg (call, 2);
577 break;
578
579 /* (dest, n) style memops. */
580 case BUILT_IN_BZERO:
581 dest = gimple_call_arg (call, 0);
582 len = gimple_call_arg (call, 1);
583 break;
584
585 /* (dest, x, n) style memops. */
586 case BUILT_IN_MEMSET:
587 case BUILT_IN_MEMSET_CHK:
588 dest = gimple_call_arg (call, 0);
589 len = gimple_call_arg (call, 2);
590 break;
591
592 case BUILT_IN_STRLEN:
593 source0 = gimple_call_arg (call, 0);
594 len = gimple_call_lhs (call);
595 break;
596
597 /* And now the __atomic* and __sync builtins.
598 These are handled differently from the classical memory
599 access builtins above. */
600
601 case BUILT_IN_ATOMIC_LOAD_1:
602 case BUILT_IN_ATOMIC_LOAD_2:
603 case BUILT_IN_ATOMIC_LOAD_4:
604 case BUILT_IN_ATOMIC_LOAD_8:
605 case BUILT_IN_ATOMIC_LOAD_16:
606 is_store = false;
607 /* fall through. */
608
609 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
610 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
611 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
612 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
613 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
614
615 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
616 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
617 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
618 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
619 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
620
621 case BUILT_IN_SYNC_FETCH_AND_OR_1:
622 case BUILT_IN_SYNC_FETCH_AND_OR_2:
623 case BUILT_IN_SYNC_FETCH_AND_OR_4:
624 case BUILT_IN_SYNC_FETCH_AND_OR_8:
625 case BUILT_IN_SYNC_FETCH_AND_OR_16:
626
627 case BUILT_IN_SYNC_FETCH_AND_AND_1:
628 case BUILT_IN_SYNC_FETCH_AND_AND_2:
629 case BUILT_IN_SYNC_FETCH_AND_AND_4:
630 case BUILT_IN_SYNC_FETCH_AND_AND_8:
631 case BUILT_IN_SYNC_FETCH_AND_AND_16:
632
633 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
634 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
635 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
636 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
637 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
638
639 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
640 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
641 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
642 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
643
644 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
645 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
646 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
647 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
648 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
649
650 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
651 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
652 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
653 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
654 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
655
656 case BUILT_IN_SYNC_OR_AND_FETCH_1:
657 case BUILT_IN_SYNC_OR_AND_FETCH_2:
658 case BUILT_IN_SYNC_OR_AND_FETCH_4:
659 case BUILT_IN_SYNC_OR_AND_FETCH_8:
660 case BUILT_IN_SYNC_OR_AND_FETCH_16:
661
662 case BUILT_IN_SYNC_AND_AND_FETCH_1:
663 case BUILT_IN_SYNC_AND_AND_FETCH_2:
664 case BUILT_IN_SYNC_AND_AND_FETCH_4:
665 case BUILT_IN_SYNC_AND_AND_FETCH_8:
666 case BUILT_IN_SYNC_AND_AND_FETCH_16:
667
668 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
669 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
670 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
671 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
672 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
673
674 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
675 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
676 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
677 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
678
679 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
680 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
681 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
682 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
683 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
684
685 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
686 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
687 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
688 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
689 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
690
691 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
692 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
693 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
694 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
695 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
696
697 case BUILT_IN_SYNC_LOCK_RELEASE_1:
698 case BUILT_IN_SYNC_LOCK_RELEASE_2:
699 case BUILT_IN_SYNC_LOCK_RELEASE_4:
700 case BUILT_IN_SYNC_LOCK_RELEASE_8:
701 case BUILT_IN_SYNC_LOCK_RELEASE_16:
702
703 case BUILT_IN_ATOMIC_EXCHANGE_1:
704 case BUILT_IN_ATOMIC_EXCHANGE_2:
705 case BUILT_IN_ATOMIC_EXCHANGE_4:
706 case BUILT_IN_ATOMIC_EXCHANGE_8:
707 case BUILT_IN_ATOMIC_EXCHANGE_16:
708
709 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
710 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
711 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
712 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
713 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
714
715 case BUILT_IN_ATOMIC_STORE_1:
716 case BUILT_IN_ATOMIC_STORE_2:
717 case BUILT_IN_ATOMIC_STORE_4:
718 case BUILT_IN_ATOMIC_STORE_8:
719 case BUILT_IN_ATOMIC_STORE_16:
720
721 case BUILT_IN_ATOMIC_ADD_FETCH_1:
722 case BUILT_IN_ATOMIC_ADD_FETCH_2:
723 case BUILT_IN_ATOMIC_ADD_FETCH_4:
724 case BUILT_IN_ATOMIC_ADD_FETCH_8:
725 case BUILT_IN_ATOMIC_ADD_FETCH_16:
726
727 case BUILT_IN_ATOMIC_SUB_FETCH_1:
728 case BUILT_IN_ATOMIC_SUB_FETCH_2:
729 case BUILT_IN_ATOMIC_SUB_FETCH_4:
730 case BUILT_IN_ATOMIC_SUB_FETCH_8:
731 case BUILT_IN_ATOMIC_SUB_FETCH_16:
732
733 case BUILT_IN_ATOMIC_AND_FETCH_1:
734 case BUILT_IN_ATOMIC_AND_FETCH_2:
735 case BUILT_IN_ATOMIC_AND_FETCH_4:
736 case BUILT_IN_ATOMIC_AND_FETCH_8:
737 case BUILT_IN_ATOMIC_AND_FETCH_16:
738
739 case BUILT_IN_ATOMIC_NAND_FETCH_1:
740 case BUILT_IN_ATOMIC_NAND_FETCH_2:
741 case BUILT_IN_ATOMIC_NAND_FETCH_4:
742 case BUILT_IN_ATOMIC_NAND_FETCH_8:
743 case BUILT_IN_ATOMIC_NAND_FETCH_16:
744
745 case BUILT_IN_ATOMIC_XOR_FETCH_1:
746 case BUILT_IN_ATOMIC_XOR_FETCH_2:
747 case BUILT_IN_ATOMIC_XOR_FETCH_4:
748 case BUILT_IN_ATOMIC_XOR_FETCH_8:
749 case BUILT_IN_ATOMIC_XOR_FETCH_16:
750
751 case BUILT_IN_ATOMIC_OR_FETCH_1:
752 case BUILT_IN_ATOMIC_OR_FETCH_2:
753 case BUILT_IN_ATOMIC_OR_FETCH_4:
754 case BUILT_IN_ATOMIC_OR_FETCH_8:
755 case BUILT_IN_ATOMIC_OR_FETCH_16:
756
757 case BUILT_IN_ATOMIC_FETCH_ADD_1:
758 case BUILT_IN_ATOMIC_FETCH_ADD_2:
759 case BUILT_IN_ATOMIC_FETCH_ADD_4:
760 case BUILT_IN_ATOMIC_FETCH_ADD_8:
761 case BUILT_IN_ATOMIC_FETCH_ADD_16:
762
763 case BUILT_IN_ATOMIC_FETCH_SUB_1:
764 case BUILT_IN_ATOMIC_FETCH_SUB_2:
765 case BUILT_IN_ATOMIC_FETCH_SUB_4:
766 case BUILT_IN_ATOMIC_FETCH_SUB_8:
767 case BUILT_IN_ATOMIC_FETCH_SUB_16:
768
769 case BUILT_IN_ATOMIC_FETCH_AND_1:
770 case BUILT_IN_ATOMIC_FETCH_AND_2:
771 case BUILT_IN_ATOMIC_FETCH_AND_4:
772 case BUILT_IN_ATOMIC_FETCH_AND_8:
773 case BUILT_IN_ATOMIC_FETCH_AND_16:
774
775 case BUILT_IN_ATOMIC_FETCH_NAND_1:
776 case BUILT_IN_ATOMIC_FETCH_NAND_2:
777 case BUILT_IN_ATOMIC_FETCH_NAND_4:
778 case BUILT_IN_ATOMIC_FETCH_NAND_8:
779 case BUILT_IN_ATOMIC_FETCH_NAND_16:
780
781 case BUILT_IN_ATOMIC_FETCH_XOR_1:
782 case BUILT_IN_ATOMIC_FETCH_XOR_2:
783 case BUILT_IN_ATOMIC_FETCH_XOR_4:
784 case BUILT_IN_ATOMIC_FETCH_XOR_8:
785 case BUILT_IN_ATOMIC_FETCH_XOR_16:
786
787 case BUILT_IN_ATOMIC_FETCH_OR_1:
788 case BUILT_IN_ATOMIC_FETCH_OR_2:
789 case BUILT_IN_ATOMIC_FETCH_OR_4:
790 case BUILT_IN_ATOMIC_FETCH_OR_8:
791 case BUILT_IN_ATOMIC_FETCH_OR_16:
792 {
793 dest = gimple_call_arg (call, 0);
794 /* DEST represents the address of a memory location.
795 instrument_derefs wants the memory location, so let's
796 dereference the address DEST before handing it to
797 instrument_derefs. */
798 if (TREE_CODE (dest) == ADDR_EXPR)
799 dest = TREE_OPERAND (dest, 0);
800 else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
801 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
802 dest, build_int_cst (TREE_TYPE (dest), 0));
803 else
804 gcc_unreachable ();
805
806 access_size = int_size_in_bytes (TREE_TYPE (dest));
807 }
808
809 default:
810 /* The other builtins' memory accesses are not instrumented in this
811 function because they either don't have any length parameter,
812 or their length parameter is just a limit. */
813 break;
814 }
815
816 if (len != NULL_TREE)
817 {
818 if (source0 != NULL_TREE)
819 {
820 src0->start = source0;
821 src0->access_size = access_size;
822 *src0_len = len;
823 *src0_is_store = false;
824 }
825
826 if (source1 != NULL_TREE)
827 {
828 src1->start = source1;
829 src1->access_size = access_size;
830 *src1_len = len;
831 *src1_is_store = false;
832 }
833
834 if (dest != NULL_TREE)
835 {
836 dst->start = dest;
837 dst->access_size = access_size;
838 *dst_len = len;
839 *dst_is_store = true;
840 }
841
842 got_reference_p = true;
843 }
844 else if (dest)
845 {
846 dst->start = dest;
847 dst->access_size = access_size;
848 *dst_len = NULL_TREE;
849 *dst_is_store = is_store;
850 *dest_is_deref = true;
851 got_reference_p = true;
852 }
853
854 return got_reference_p;
855 }
856
857 /* Return true iff a given gimple statement has been instrumented.
858 Note that the statement is "defined" by the memory references it
859 contains. */
860
861 static bool
862 has_stmt_been_instrumented_p (gimple *stmt)
863 {
864 if (gimple_assign_single_p (stmt))
865 {
866 bool r_is_store;
867 asan_mem_ref r;
868 asan_mem_ref_init (&r, NULL, 1);
869
870 if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
871 &r_is_store))
872 return has_mem_ref_been_instrumented (&r);
873 }
874 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
875 {
876 asan_mem_ref src0, src1, dest;
877 asan_mem_ref_init (&src0, NULL, 1);
878 asan_mem_ref_init (&src1, NULL, 1);
879 asan_mem_ref_init (&dest, NULL, 1);
880
881 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
882 bool src0_is_store = false, src1_is_store = false,
883 dest_is_store = false, dest_is_deref = false, intercepted_p = true;
884 if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
885 &src0, &src0_len, &src0_is_store,
886 &src1, &src1_len, &src1_is_store,
887 &dest, &dest_len, &dest_is_store,
888 &dest_is_deref, &intercepted_p))
889 {
890 if (src0.start != NULL_TREE
891 && !has_mem_ref_been_instrumented (&src0, src0_len))
892 return false;
893
894 if (src1.start != NULL_TREE
895 && !has_mem_ref_been_instrumented (&src1, src1_len))
896 return false;
897
898 if (dest.start != NULL_TREE
899 && !has_mem_ref_been_instrumented (&dest, dest_len))
900 return false;
901
902 return true;
903 }
904 }
905 return false;
906 }
907
908 /* Insert a memory reference into the hash table. */
909
910 static void
911 update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
912 {
913 hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
914
915 asan_mem_ref r;
916 asan_mem_ref_init (&r, ref, access_size);
917
918 asan_mem_ref **slot = ht->find_slot (&r, INSERT);
919 if (*slot == NULL || (*slot)->access_size < access_size)
920 *slot = asan_mem_ref_new (ref, access_size);
921 }
922
923 /* Initialize shadow_ptr_types array. */
924
925 static void
926 asan_init_shadow_ptr_types (void)
927 {
928 asan_shadow_set = new_alias_set ();
929 shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
930 TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
931 shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
932 shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
933 TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
934 shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
935 initialize_sanitizer_builtins ();
936 }
937
938 /* Create an ADDR_EXPR of a STRING_CST holding the PP pretty printer text. */
939
940 static tree
941 asan_pp_string (pretty_printer *pp)
942 {
943 const char *buf = pp_formatted_text (pp);
944 size_t len = strlen (buf);
945 tree ret = build_string (len + 1, buf);
946 TREE_TYPE (ret)
947 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
948 build_index_type (size_int (len)));
949 TREE_READONLY (ret) = 1;
950 TREE_STATIC (ret) = 1;
951 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
952 }
953
954 /* Return a CONST_INT representing 4 consecutive shadow memory bytes. */
955
956 static rtx
957 asan_shadow_cst (unsigned char shadow_bytes[4])
958 {
959 int i;
960 unsigned HOST_WIDE_INT val = 0;
961 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
962 for (i = 0; i < 4; i++)
963 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
964 << (BITS_PER_UNIT * i);
965 return gen_int_mode (val, SImode);
966 }
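
/* For example (little-endian illustration): shadow_bytes of
   { 0x00, 0x00, 0x04, 0xF2 } are packed into the SImode constant
   0xF2040000, so one 4-byte store writes the four shadow bytes in memory
   order.  */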
967
968 /* Clear shadow memory at SHADOW_MEM, LEN bytes. We cannot use a library call
969 here, though. */
970
971 static void
972 asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
973 {
974 rtx_insn *insn, *insns, *jump;
975 rtx_code_label *top_label;
976 rtx end, addr, tmp;
977
978 start_sequence ();
979 clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
980 insns = get_insns ();
981 end_sequence ();
982 for (insn = insns; insn; insn = NEXT_INSN (insn))
983 if (CALL_P (insn))
984 break;
985 if (insn == NULL_RTX)
986 {
987 emit_insn (insns);
988 return;
989 }
990
991 gcc_assert ((len & 3) == 0);
992 top_label = gen_label_rtx ();
993 addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
994 shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
995 end = force_reg (Pmode, plus_constant (Pmode, addr, len));
996 emit_label (top_label);
997
998 emit_move_insn (shadow_mem, const0_rtx);
999 tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
1000 true, OPTAB_LIB_WIDEN);
1001 if (tmp != addr)
1002 emit_move_insn (addr, tmp);
1003 emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
1004 jump = get_last_insn ();
1005 gcc_assert (JUMP_P (jump));
1006 add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
1007 }
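
/* In other words (a sketch of the behavior, not the emitted RTL): when
   clear_storage would have needed a library call, the loop generated above
   acts like

     unsigned char *p = shadow_start, *end = shadow_start + len;
     do
       {
         *(unsigned int *) p = 0;   // one 4-byte (SImode) store
         p += 4;
       }
     while (p < end);

   which is why LEN is asserted to be a multiple of 4.  */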
1008
1009 void
1010 asan_function_start (void)
1011 {
1012 section *fnsec = function_section (current_function_decl);
1013 switch_to_section (fnsec);
1014 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
1015 current_function_funcdef_no);
1016 }
1017
1018 /* Insert code to protect stack vars. The prologue sequence should be emitted
1019 directly, epilogue sequence returned. BASE is the register holding the
1020 stack base, relative to which the OFFSETS array offsets are expressed. The
1021 OFFSETS array contains pairs of offsets in reverse order, always the end offset
1022 of some gap that needs protection followed by starting offset,
1023 and DECLS is an array of representative decls for each var partition.
1024 LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
1025 elements long (OFFSETS include gap before the first variable as well
1026 as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
1027 register which stack vars DECL_RTLs are based on. Either BASE should be
1028 assigned to PBASE, when not doing use after return protection, or
1029 corresponding address based on __asan_stack_malloc* return value. */
1030
1031 rtx_insn *
1032 asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
1033 HOST_WIDE_INT *offsets, tree *decls, int length)
1034 {
1035 rtx shadow_base, shadow_mem, ret, mem, orig_base;
1036 rtx_code_label *lab;
1037 rtx_insn *insns;
1038 char buf[30];
1039 unsigned char shadow_bytes[4];
1040 HOST_WIDE_INT base_offset = offsets[length - 1];
1041 HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
1042 HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
1043 HOST_WIDE_INT last_offset, last_size;
1044 int l;
1045 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
1046 tree str_cst, decl, id;
1047 int use_after_return_class = -1;
1048
1049 if (shadow_ptr_types[0] == NULL_TREE)
1050 asan_init_shadow_ptr_types ();
1051
1052 /* First of all, prepare the description string. */
1053 pretty_printer asan_pp;
1054
1055 pp_decimal_int (&asan_pp, length / 2 - 1);
1056 pp_space (&asan_pp);
1057 for (l = length - 2; l; l -= 2)
1058 {
1059 tree decl = decls[l / 2 - 1];
1060 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
1061 pp_space (&asan_pp);
1062 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
1063 pp_space (&asan_pp);
1064 if (DECL_P (decl) && DECL_NAME (decl))
1065 {
1066 pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
1067 pp_space (&asan_pp);
1068 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
1069 }
1070 else
1071 pp_string (&asan_pp, "9 <unknown>");
1072 pp_space (&asan_pp);
1073 }
1074 str_cst = asan_pp_string (&asan_pp);
1075
1076 /* Emit the prologue sequence. */
1077 if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
1078 && ASAN_USE_AFTER_RETURN)
1079 {
1080 use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
1081 /* __asan_stack_malloc_N guarantees alignment
1082 N < 6 ? (64 << N) : 4096 bytes. */
1083 if (alignb > (use_after_return_class < 6
1084 ? (64U << use_after_return_class) : 4096U))
1085 use_after_return_class = -1;
1086 else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
1087 base_align_bias = ((asan_frame_size + alignb - 1)
1088 & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
1089 }
1090 /* Align base if target is STRICT_ALIGNMENT. */
1091 if (STRICT_ALIGNMENT)
1092 base = expand_binop (Pmode, and_optab, base,
1093 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
1094 << ASAN_SHADOW_SHIFT)
1095 / BITS_PER_UNIT), Pmode), NULL_RTX,
1096 1, OPTAB_DIRECT);
1097
1098 if (use_after_return_class == -1 && pbase)
1099 emit_move_insn (pbase, base);
1100
1101 base = expand_binop (Pmode, add_optab, base,
1102 gen_int_mode (base_offset - base_align_bias, Pmode),
1103 NULL_RTX, 1, OPTAB_DIRECT);
1104 orig_base = NULL_RTX;
1105 if (use_after_return_class != -1)
1106 {
1107 if (asan_detect_stack_use_after_return == NULL_TREE)
1108 {
1109 id = get_identifier ("__asan_option_detect_stack_use_after_return");
1110 decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1111 integer_type_node);
1112 SET_DECL_ASSEMBLER_NAME (decl, id);
1113 TREE_ADDRESSABLE (decl) = 1;
1114 DECL_ARTIFICIAL (decl) = 1;
1115 DECL_IGNORED_P (decl) = 1;
1116 DECL_EXTERNAL (decl) = 1;
1117 TREE_STATIC (decl) = 1;
1118 TREE_PUBLIC (decl) = 1;
1119 TREE_USED (decl) = 1;
1120 asan_detect_stack_use_after_return = decl;
1121 }
1122 orig_base = gen_reg_rtx (Pmode);
1123 emit_move_insn (orig_base, base);
1124 ret = expand_normal (asan_detect_stack_use_after_return);
1125 lab = gen_label_rtx ();
1126 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1127 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1128 VOIDmode, 0, lab, very_likely);
1129 snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1130 use_after_return_class);
1131 ret = init_one_libfunc (buf);
1132 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 1,
1133 GEN_INT (asan_frame_size
1134 + base_align_bias),
1135 TYPE_MODE (pointer_sized_int_node));
1136 /* __asan_stack_malloc_[n] returns a pointer to the fake stack if it succeeded
1137 and NULL otherwise. Check whether RET is NULL here and jump over the
1138 BASE reassignment in that case. Otherwise, reassign BASE to RET. */
1139 int very_unlikely = REG_BR_PROB_BASE / 2000 - 1;
1140 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1141 VOIDmode, 0, lab, very_unlikely);
1142 ret = convert_memory_address (Pmode, ret);
1143 emit_move_insn (base, ret);
1144 emit_label (lab);
1145 emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1146 gen_int_mode (base_align_bias
1147 - base_offset, Pmode),
1148 NULL_RTX, 1, OPTAB_DIRECT));
1149 }
1150 mem = gen_rtx_MEM (ptr_mode, base);
1151 mem = adjust_address (mem, VOIDmode, base_align_bias);
1152 emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
1153 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1154 emit_move_insn (mem, expand_normal (str_cst));
1155 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1156 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1157 id = get_identifier (buf);
1158 decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1159 VAR_DECL, id, char_type_node);
1160 SET_DECL_ASSEMBLER_NAME (decl, id);
1161 TREE_ADDRESSABLE (decl) = 1;
1162 TREE_READONLY (decl) = 1;
1163 DECL_ARTIFICIAL (decl) = 1;
1164 DECL_IGNORED_P (decl) = 1;
1165 TREE_STATIC (decl) = 1;
1166 TREE_PUBLIC (decl) = 0;
1167 TREE_USED (decl) = 1;
1168 DECL_INITIAL (decl) = decl;
1169 TREE_ASM_WRITTEN (decl) = 1;
1170 TREE_ASM_WRITTEN (id) = 1;
1171 emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1172 shadow_base = expand_binop (Pmode, lshr_optab, base,
1173 GEN_INT (ASAN_SHADOW_SHIFT),
1174 NULL_RTX, 1, OPTAB_DIRECT);
1175 shadow_base
1176 = plus_constant (Pmode, shadow_base,
1177 asan_shadow_offset ()
1178 + (base_align_bias >> ASAN_SHADOW_SHIFT));
1179 gcc_assert (asan_shadow_set != -1
1180 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1181 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1182 set_mem_alias_set (shadow_mem, asan_shadow_set);
1183 if (STRICT_ALIGNMENT)
1184 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1185 prev_offset = base_offset;
1186 for (l = length; l; l -= 2)
1187 {
1188 if (l == 2)
1189 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1190 offset = offsets[l - 1];
1191 if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
1192 {
1193 int i;
1194 HOST_WIDE_INT aoff
1195 = base_offset + ((offset - base_offset)
1196 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1197 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1198 (aoff - prev_offset)
1199 >> ASAN_SHADOW_SHIFT);
1200 prev_offset = aoff;
1201 for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
1202 if (aoff < offset)
1203 {
1204 if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
1205 shadow_bytes[i] = 0;
1206 else
1207 shadow_bytes[i] = offset - aoff;
1208 }
1209 else
1210 shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
1211 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1212 offset = aoff;
1213 }
1214 while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
1215 {
1216 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1217 (offset - prev_offset)
1218 >> ASAN_SHADOW_SHIFT);
1219 prev_offset = offset;
1220 memset (shadow_bytes, cur_shadow_byte, 4);
1221 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1222 offset += ASAN_RED_ZONE_SIZE;
1223 }
1224 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1225 }
1226 do_pending_stack_adjust ();
1227
1228 /* Construct epilogue sequence. */
1229 start_sequence ();
1230
1231 lab = NULL;
1232 if (use_after_return_class != -1)
1233 {
1234 rtx_code_label *lab2 = gen_label_rtx ();
1235 char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
1236 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1237 emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
1238 VOIDmode, 0, lab2, very_likely);
1239 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1240 set_mem_alias_set (shadow_mem, asan_shadow_set);
1241 mem = gen_rtx_MEM (ptr_mode, base);
1242 mem = adjust_address (mem, VOIDmode, base_align_bias);
1243 emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
1244 unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
1245 if (use_after_return_class < 5
1246 && can_store_by_pieces (sz, builtin_memset_read_str, &c,
1247 BITS_PER_UNIT, true))
1248 store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
1249 BITS_PER_UNIT, true, 0);
1250 else if (use_after_return_class >= 5
1251 || !set_storage_via_setmem (shadow_mem,
1252 GEN_INT (sz),
1253 gen_int_mode (c, QImode),
1254 BITS_PER_UNIT, BITS_PER_UNIT,
1255 -1, sz, sz, sz))
1256 {
1257 snprintf (buf, sizeof buf, "__asan_stack_free_%d",
1258 use_after_return_class);
1259 ret = init_one_libfunc (buf);
1260 rtx addr = convert_memory_address (ptr_mode, base);
1261 rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
1262 emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
1263 GEN_INT (asan_frame_size + base_align_bias),
1264 TYPE_MODE (pointer_sized_int_node),
1265 orig_addr, ptr_mode);
1266 }
1267 lab = gen_label_rtx ();
1268 emit_jump (lab);
1269 emit_label (lab2);
1270 }
1271
1272 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1273 set_mem_alias_set (shadow_mem, asan_shadow_set);
1274
1275 if (STRICT_ALIGNMENT)
1276 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1277
1278 prev_offset = base_offset;
1279 last_offset = base_offset;
1280 last_size = 0;
1281 for (l = length; l; l -= 2)
1282 {
1283 offset = base_offset + ((offsets[l - 1] - base_offset)
1284 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1285 if (last_offset + last_size != offset)
1286 {
1287 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1288 (last_offset - prev_offset)
1289 >> ASAN_SHADOW_SHIFT);
1290 prev_offset = last_offset;
1291 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1292 last_offset = offset;
1293 last_size = 0;
1294 }
1295 last_size += base_offset + ((offsets[l - 2] - base_offset)
1296 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1297 - offset;
1298 }
1299 if (last_size)
1300 {
1301 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1302 (last_offset - prev_offset)
1303 >> ASAN_SHADOW_SHIFT);
1304 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1305 }
1306
1307 do_pending_stack_adjust ();
1308 if (lab)
1309 emit_label (lab);
1310
1311 insns = get_insns ();
1312 end_sequence ();
1313 return insns;
1314 }
1315
1316 /* Return true if DECL, a global var, might be overridden and therefore
1317 needs a local alias. */
1318
1319 static bool
1320 asan_needs_local_alias (tree decl)
1321 {
1322 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1323 }
1324
1325 /* Return true if DECL is a VAR_DECL that should be protected
1326 by Address Sanitizer, by appending a red zone with protected
1327 shadow memory after it and aligning it to at least
1328 ASAN_RED_ZONE_SIZE bytes. */
1329
1330 bool
1331 asan_protect_global (tree decl)
1332 {
1333 if (!ASAN_GLOBALS)
1334 return false;
1335
1336 rtx rtl, symbol;
1337
1338 if (TREE_CODE (decl) == STRING_CST)
1339 {
1340 /* Instrument all STRING_CSTs except those created
1341 by asan_pp_string here. */
1342 if (shadow_ptr_types[0] != NULL_TREE
1343 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1344 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1345 return false;
1346 return true;
1347 }
1348 if (TREE_CODE (decl) != VAR_DECL
1349 /* TLS vars aren't statically protectable. */
1350 || DECL_THREAD_LOCAL_P (decl)
1351 /* Externs will be protected elsewhere. */
1352 || DECL_EXTERNAL (decl)
1353 || !DECL_RTL_SET_P (decl)
1354 /* Comdat vars pose an ABI problem, we can't know if
1355 the var that is selected by the linker will have
1356 padding or not. */
1357 || DECL_ONE_ONLY (decl)
1358 /* Similarly for common vars. People can use -fno-common.
1359 Note: Linux kernel is built with -fno-common, so we do instrument
1360 globals there even if it is C. */
1361 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
1362 /* Don't protect if using a user section; often vars placed
1363 into a user section from multiple TUs are then assumed
1364 to be an array of such vars, and putting padding in there
1365 breaks this assumption. */
1366 || (DECL_SECTION_NAME (decl) != NULL
1367 && !symtab_node::get (decl)->implicit_section
1368 && !section_sanitized_p (DECL_SECTION_NAME (decl)))
1369 || DECL_SIZE (decl) == 0
1370 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1371 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1372 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
1373 || TREE_TYPE (decl) == ubsan_get_source_location_type ())
1374 return false;
1375
1376 rtl = DECL_RTL (decl);
1377 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1378 return false;
1379 symbol = XEXP (rtl, 0);
1380
1381 if (CONSTANT_POOL_ADDRESS_P (symbol)
1382 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1383 return false;
1384
1385 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1386 return false;
1387
1388 #ifndef ASM_OUTPUT_DEF
1389 if (asan_needs_local_alias (decl))
1390 return false;
1391 #endif
1392
1393 return true;
1394 }
1395
1396 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1397 IS_STORE is either 1 (for a store) or 0 (for a load). */
1398
1399 static tree
1400 report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1401 int *nargs)
1402 {
1403 static enum built_in_function report[2][2][6]
1404 = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1405 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1406 BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1407 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1408 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1409 BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
1410 { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
1411 BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
1412 BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
1413 BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
1414 BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
1415 BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
1416 { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
1417 BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
1418 BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
1419 BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
1420 BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
1421 BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
1422 if (size_in_bytes == -1)
1423 {
1424 *nargs = 2;
1425 return builtin_decl_implicit (report[recover_p][is_store][5]);
1426 }
1427 *nargs = 1;
1428 int size_log2 = exact_log2 (size_in_bytes);
1429 return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
1430 }
1431
1432 /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1433 IS_STORE is either 1 (for a store) or 0 (for a load). */
1434
1435 static tree
1436 check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1437 int *nargs)
1438 {
1439 static enum built_in_function check[2][2][6]
1440 = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
1441 BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
1442 BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
1443 { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
1444 BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
1445 BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
1446 { { BUILT_IN_ASAN_LOAD1_NOABORT,
1447 BUILT_IN_ASAN_LOAD2_NOABORT,
1448 BUILT_IN_ASAN_LOAD4_NOABORT,
1449 BUILT_IN_ASAN_LOAD8_NOABORT,
1450 BUILT_IN_ASAN_LOAD16_NOABORT,
1451 BUILT_IN_ASAN_LOADN_NOABORT },
1452 { BUILT_IN_ASAN_STORE1_NOABORT,
1453 BUILT_IN_ASAN_STORE2_NOABORT,
1454 BUILT_IN_ASAN_STORE4_NOABORT,
1455 BUILT_IN_ASAN_STORE8_NOABORT,
1456 BUILT_IN_ASAN_STORE16_NOABORT,
1457 BUILT_IN_ASAN_STOREN_NOABORT } } };
1458 if (size_in_bytes == -1)
1459 {
1460 *nargs = 2;
1461 return builtin_decl_implicit (check[recover_p][is_store][5]);
1462 }
1463 *nargs = 1;
1464 int size_log2 = exact_log2 (size_in_bytes);
1465 return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
1466 }
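
/* For instance, calling check_func with is_store = true, recover_p = false
   and size_in_bytes = 8 yields BUILT_IN_ASAN_STORE8 with *NARGS set to 1,
   while an unknown size of -1 selects the _N variant (whose runtime entry
   point takes the address and the size) and sets *NARGS to 2.  */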
1467
1468 /* Split the current basic block and create a condition statement
1469 insertion point right before or after the statement pointed to by
1470 ITER. Return an iterator to the point at which the caller might
1471 safely insert the condition statement.
1472
1473 THEN_BLOCK must be set to the address of an uninitialized instance
1474 of basic_block. The function will then set *THEN_BLOCK to the
1475 'then block' of the condition statement to be inserted by the
1476 caller.
1477
1478 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1479 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1480
1481 Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1482 block' of the condition statement to be inserted by the caller.
1483
1484 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1485 statements starting from *ITER, and *THEN_BLOCK is a new empty
1486 block.
1487
1488 *ITER is adjusted to always point to the first statement
1489 of the basic block *FALLTHROUGH_BLOCK. That statement is the
1490 same as what ITER was pointing to prior to calling this function,
1491 if BEFORE_P is true; otherwise, it is its following statement. */
1492
1493 gimple_stmt_iterator
1494 create_cond_insert_point (gimple_stmt_iterator *iter,
1495 bool before_p,
1496 bool then_more_likely_p,
1497 bool create_then_fallthru_edge,
1498 basic_block *then_block,
1499 basic_block *fallthrough_block)
1500 {
1501 gimple_stmt_iterator gsi = *iter;
1502
1503 if (!gsi_end_p (gsi) && before_p)
1504 gsi_prev (&gsi);
1505
1506 basic_block cur_bb = gsi_bb (*iter);
1507
1508 edge e = split_block (cur_bb, gsi_stmt (gsi));
1509
1510 /* Get a hold on the 'condition block', the 'then block' and the
1511 'else block'. */
1512 basic_block cond_bb = e->src;
1513 basic_block fallthru_bb = e->dest;
1514 basic_block then_bb = create_empty_bb (cond_bb);
1515 if (current_loops)
1516 {
1517 add_bb_to_loop (then_bb, cond_bb->loop_father);
1518 loops_state_set (LOOPS_NEED_FIXUP);
1519 }
1520
1521 /* Set up the newly created 'then block'. */
1522 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1523 int fallthrough_probability
1524 = then_more_likely_p
1525 ? PROB_VERY_UNLIKELY
1526 : PROB_ALWAYS - PROB_VERY_UNLIKELY;
1527 e->probability = PROB_ALWAYS - fallthrough_probability;
1528 if (create_then_fallthru_edge)
1529 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1530
1531 /* Set up the fallthrough basic block. */
1532 e = find_edge (cond_bb, fallthru_bb);
1533 e->flags = EDGE_FALSE_VALUE;
1534 e->count = cond_bb->count;
1535 e->probability = fallthrough_probability;
1536
1537 /* Update dominance info for the newly created then_bb; note that
1538 fallthru_bb's dominance info has already been updated by
1539 split_block. */
1540 if (dom_info_available_p (CDI_DOMINATORS))
1541 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1542
1543 *then_block = then_bb;
1544 *fallthrough_block = fallthru_bb;
1545 *iter = gsi_start_bb (fallthru_bb);
1546
1547 return gsi_last_bb (cond_bb);
1548 }
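
/* The resulting shape of the CFG, roughly:

            cond_bb     (the returned iterator points at its end; the
           /       \     caller inserts the GIMPLE_COND there)
      then_bb    fallthru_bb   (*ITER now points at its first statement)
           \________/
        (this edge only if CREATE_THEN_FALLTHRU_EDGE)  */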
1549
1550 /* Insert an if condition followed by a 'then block' right before the
1551 statement pointed to by ITER. The fallthrough block -- which is the
1552 else block of the condition as well as the destination of the
1553 outgoing edge of the 'then block' -- starts with the statement
1554 pointed to by ITER.
1555
1556 COND is the condition of the if.
1557
1558 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1559 'then block' is higher than the probability of the edge to the
1560 fallthrough block.
1561
1562 Upon completion of the function, *THEN_BB is set to the newly
1563 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1564 fallthrough block.
1565
1566 *ITER is adjusted to still point to the same statement it was
1567 pointing to initially. */
1568
1569 static void
1570 insert_if_then_before_iter (gcond *cond,
1571 gimple_stmt_iterator *iter,
1572 bool then_more_likely_p,
1573 basic_block *then_bb,
1574 basic_block *fallthrough_bb)
1575 {
1576 gimple_stmt_iterator cond_insert_point =
1577 create_cond_insert_point (iter,
1578 /*before_p=*/true,
1579 then_more_likely_p,
1580 /*create_then_fallthru_edge=*/true,
1581 then_bb,
1582 fallthrough_bb);
1583 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1584 }
1585
1586 /* Build
1587 (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset (). */
1588
1589 static tree
1590 build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
1591 tree base_addr, tree shadow_ptr_type)
1592 {
1593 tree t, uintptr_type = TREE_TYPE (base_addr);
1594 tree shadow_type = TREE_TYPE (shadow_ptr_type);
1595 gimple *g;
1596
1597 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
1598 g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
1599 base_addr, t);
1600 gimple_set_location (g, location);
1601 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1602
1603 t = build_int_cst (uintptr_type, asan_shadow_offset ());
1604 g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
1605 gimple_assign_lhs (g), t);
1606 gimple_set_location (g, location);
1607 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1608
1609 g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
1610 gimple_assign_lhs (g));
1611 gimple_set_location (g, location);
1612 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1613
1614 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1615 build_int_cst (shadow_ptr_type, 0));
1616 g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
1617 gimple_set_location (g, location);
1618 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1619 return gimple_assign_lhs (g);
1620 }
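
/* For a base address B this emits GIMPLE roughly equivalent to (the SSA
   names are illustrative):

     _1 = B >> ASAN_SHADOW_SHIFT;
     _2 = _1 + <asan shadow offset>;
     _3 = (shadow ptr type) _2;
     _4 = *_3;

   and returns _4, the shadow byte (or 16-bit value) that was loaded.  */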
1621
1622 /* BASE can already be an SSA_NAME; in that case, do not create a
1623 new SSA_NAME for it. */
1624
1625 static tree
1626 maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
1627 bool before_p)
1628 {
1629 if (TREE_CODE (base) == SSA_NAME)
1630 return base;
1631 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
1632 TREE_CODE (base), base);
1633 gimple_set_location (g, loc);
1634 if (before_p)
1635 gsi_insert_before (iter, g, GSI_SAME_STMT);
1636 else
1637 gsi_insert_after (iter, g, GSI_NEW_STMT);
1638 return gimple_assign_lhs (g);
1639 }
1640
1641 /* LEN can already have necessary size and precision;
1642 in that case, do not create a new variable. */
1643
1644 tree
1645 maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
1646 bool before_p)
1647 {
1648 if (ptrofftype_p (len))
1649 return len;
1650 gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1651 NOP_EXPR, len);
1652 gimple_set_location (g, loc);
1653 if (before_p)
1654 gsi_insert_before (iter, g, GSI_SAME_STMT);
1655 else
1656 gsi_insert_after (iter, g, GSI_NEW_STMT);
1657 return gimple_assign_lhs (g);
1658 }
1659
1660 /* Instrument the memory access BASE. Insert new
1661 statements before or after ITER.
1662
1663 Note that the memory access represented by BASE can be either an
1664 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1665 location. IS_STORE is TRUE for a store, FALSE for a load.
1666 BEFORE_P is TRUE for inserting the instrumentation code before
1667 ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
1668 for a scalar memory access and FALSE for memory region access.
1669 NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
1670 length. ALIGN tells alignment of accessed memory object.
1671
1674
1675 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1676 statement it was pointing to prior to calling this function,
1677 otherwise, it points to the statement logically following it. */
1678
1679 static void
1680 build_check_stmt (location_t loc, tree base, tree len,
1681 HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
1682 bool is_non_zero_len, bool before_p, bool is_store,
1683 bool is_scalar_access, unsigned int align = 0)
1684 {
1685 gimple_stmt_iterator gsi = *iter;
1686 gimple *g;
1687
1688 gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
1689
1690 gsi = *iter;
1691
1692 base = unshare_expr (base);
1693 base = maybe_create_ssa_name (loc, base, &gsi, before_p);
1694
1695 if (len)
1696 {
1697 len = unshare_expr (len);
1698 len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
1699 }
1700 else
1701 {
1702 gcc_assert (size_in_bytes != -1);
1703 len = build_int_cst (pointer_sized_int_node, size_in_bytes);
1704 }
1705
1706 if (size_in_bytes > 1)
1707 {
1708 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1709 || size_in_bytes > 16)
1710 is_scalar_access = false;
1711 else if (align && align < size_in_bytes * BITS_PER_UNIT)
1712 {
1713 /* On non-strict-alignment targets, if a 16-byte access
1714    is only 8-byte aligned, this results in a misaligned
1715    2-byte load from shadow memory, but the access can
1716    otherwise still be handled with a single shadow
1717    read.  */
1718 if (size_in_bytes != 16
1719 || STRICT_ALIGNMENT
1720 || align < 8 * BITS_PER_UNIT)
1721 is_scalar_access = false;
1722 }
1723 }
1724
1725 HOST_WIDE_INT flags = 0;
1726 if (is_store)
1727 flags |= ASAN_CHECK_STORE;
1728 if (is_non_zero_len)
1729 flags |= ASAN_CHECK_NON_ZERO_LEN;
1730 if (is_scalar_access)
1731 flags |= ASAN_CHECK_SCALAR_ACCESS;
1732
1733 g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
1734 build_int_cst (integer_type_node, flags),
1735 base, len,
1736 build_int_cst (integer_type_node,
1737 align / BITS_PER_UNIT));
1738 gimple_set_location (g, loc);
1739 if (before_p)
1740 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1741 else
1742 {
1743 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1744 gsi_next (&gsi);
1745 *iter = gsi;
1746 }
1747 }
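
/* Editorial example (illustrative only): for an 8-byte store through a
   pointer p_1 with known 8-byte alignment, build_check_stmt above leaves
   behind an internal call of roughly the following shape, which
   asan_expand_check_ifn later expands into real shadow-memory checks:

     .ASAN_CHECK (ASAN_CHECK_STORE | ASAN_CHECK_NON_ZERO_LEN
                  | ASAN_CHECK_SCALAR_ACCESS, p_1, 8, 8);

   i.e. flags, base address, length in bytes, and alignment in bytes.  */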
1748
1749 /* If T represents a memory access, add instrumentation code before ITER.
1750 LOCATION is the source code location.
1751 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
1752
1753 static void
1754 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1755 location_t location, bool is_store)
1756 {
1757 if (is_store && !ASAN_INSTRUMENT_WRITES)
1758 return;
1759 if (!is_store && !ASAN_INSTRUMENT_READS)
1760 return;
1761
1762 tree type, base;
1763 HOST_WIDE_INT size_in_bytes;
1764
1765 type = TREE_TYPE (t);
1766 switch (TREE_CODE (t))
1767 {
1768 case ARRAY_REF:
1769 case COMPONENT_REF:
1770 case INDIRECT_REF:
1771 case MEM_REF:
1772 case VAR_DECL:
1773 case BIT_FIELD_REF:
1774 break;
1776 default:
1777 return;
1778 }
1779
1780 size_in_bytes = int_size_in_bytes (type);
1781 if (size_in_bytes <= 0)
1782 return;
1783
1784 HOST_WIDE_INT bitsize, bitpos;
1785 tree offset;
1786 machine_mode mode;
1787 int volatilep = 0, unsignedp = 0;
1788 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
1789 &mode, &unsignedp, &volatilep, false);
1790
1791 if (TREE_CODE (t) == COMPONENT_REF
1792 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1793 {
1794 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1795 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1796 TREE_OPERAND (t, 0), repr,
1797 NULL_TREE), location, is_store);
1798 return;
1799 }
1800
1801 if (bitpos % BITS_PER_UNIT
1802 || bitsize != size_in_bytes * BITS_PER_UNIT)
1803 return;
1804
1805 if (TREE_CODE (inner) == VAR_DECL
1806 && offset == NULL_TREE
1807 && bitpos >= 0
1808 && DECL_SIZE (inner)
1809 && tree_fits_shwi_p (DECL_SIZE (inner))
1810 && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
1811 {
1812 if (DECL_THREAD_LOCAL_P (inner))
1813 return;
1814 if (!ASAN_GLOBALS && is_global_var (inner))
1815 return;
1816 if (!TREE_STATIC (inner))
1817 {
1818 /* Automatic vars in the current function will always be
1819    accessible.  */
1820 if (decl_function_context (inner) == current_function_decl)
1821 return;
1822 }
1823 /* Always instrument external vars; they might be dynamically
1824    initialized.  */
1825 else if (!DECL_EXTERNAL (inner))
1826 {
1827 /* For static vars, if they are known not to be dynamically
1828    initialized, they will always be accessible.  */
1829 varpool_node *vnode = varpool_node::get (inner);
1830 if (vnode && !vnode->dynamically_initialized)
1831 return;
1832 }
1833 }
1834
1835 base = build_fold_addr_expr (t);
1836 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1837 {
1838 unsigned int align = get_object_alignment (t);
1839 build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
1840 /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
1841 is_store, /*is_scalar_access*/true, align);
1842 update_mem_ref_hash_table (base, size_in_bytes);
1843 update_mem_ref_hash_table (t, size_in_bytes);
1844 }
1845
1846 }
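
/* Editorial example (illustrative input only, guarded out): how the
   decisions above typically apply to a few kinds of accesses.  */
#if 0
int g;                    /* Defined global, not dynamically initialized:
                             direct accesses are normally skipped via the
                             varpool check above.  */
extern int e;             /* External: always instrumented, it might be
                             dynamically initialized elsewhere.  */
static __thread int t;    /* Thread-local: never instrumented here.  */

int
example (int *p)
{
  int l = 0;              /* Automatic var of the current function:
                             accesses to it are skipped.  */
  l += *p;                /* 4-byte load through a pointer (MEM_REF):
                             instrumented as a scalar access.  */
  return l + g + e + t;
}
#endif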
1847
1848 /* Insert a memory reference into the hash table if the access length
1849    can be determined at compile time.  */
1850
1851 static void
1852 maybe_update_mem_ref_hash_table (tree base, tree len)
1853 {
1854 if (!POINTER_TYPE_P (TREE_TYPE (base))
1855 || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
1856 return;
1857
1858 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1859
1860 if (size_in_bytes != -1)
1861 update_mem_ref_hash_table (base, size_in_bytes);
1862 }
1863
1864 /* Instrument an access to a contiguous memory region that starts at
1865 the address pointed to by BASE, over a length of LEN (expressed in
1866 the sizeof (*BASE) bytes). ITER points to the instruction before
1867 which the instrumentation instructions must be inserted. LOCATION
1868 is the source location that the instrumentation instructions must
1869 have. If IS_STORE is true, then the memory access is a store;
1870 otherwise, it's a load. */
1871
1872 static void
1873 instrument_mem_region_access (tree base, tree len,
1874 gimple_stmt_iterator *iter,
1875 location_t location, bool is_store)
1876 {
1877 if (!POINTER_TYPE_P (TREE_TYPE (base))
1878 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1879 || integer_zerop (len))
1880 return;
1881
1882 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1883
1884 if ((size_in_bytes == -1)
1885 || !has_mem_ref_been_instrumented (base, size_in_bytes))
1886 {
1887 build_check_stmt (location, base, len, size_in_bytes, iter,
1888 /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
1889 is_store, /*is_scalar_access*/false, /*align*/0);
1890 }
1891
1892 maybe_update_mem_ref_hash_table (base, len);
1893 *iter = gsi_for_stmt (gsi_stmt (*iter));
1894 }
1895
1896 /* Instrument the call to a built-in memory access function that is
1897 pointed to by the iterator ITER.
1898
1899 Upon completion, return TRUE iff *ITER has been advanced to the
1900 statement following the one it was originally pointing to. */
1901
1902 static bool
1903 instrument_builtin_call (gimple_stmt_iterator *iter)
1904 {
1905 if (!ASAN_MEMINTRIN)
1906 return false;
1907
1908 bool iter_advanced_p = false;
1909 gcall *call = as_a <gcall *> (gsi_stmt (*iter));
1910
1911 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
1912
1913 location_t loc = gimple_location (call);
1914
1915 asan_mem_ref src0, src1, dest;
1916 asan_mem_ref_init (&src0, NULL, 1);
1917 asan_mem_ref_init (&src1, NULL, 1);
1918 asan_mem_ref_init (&dest, NULL, 1);
1919
1920 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1921 bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
1922 dest_is_deref = false, intercepted_p = true;
1923
1924 if (get_mem_refs_of_builtin_call (call,
1925 &src0, &src0_len, &src0_is_store,
1926 &src1, &src1_len, &src1_is_store,
1927 &dest, &dest_len, &dest_is_store,
1928 &dest_is_deref, &intercepted_p))
1929 {
1930 if (dest_is_deref)
1931 {
1932 instrument_derefs (iter, dest.start, loc, dest_is_store);
1933 gsi_next (iter);
1934 iter_advanced_p = true;
1935 }
1936 else if (!intercepted_p
1937 && (src0_len || src1_len || dest_len))
1938 {
1939 if (src0.start != NULL_TREE)
1940 instrument_mem_region_access (src0.start, src0_len,
1941 iter, loc, /*is_store=*/false);
1942 if (src1.start != NULL_TREE)
1943 instrument_mem_region_access (src1.start, src1_len,
1944 iter, loc, /*is_store=*/false);
1945 if (dest.start != NULL_TREE)
1946 instrument_mem_region_access (dest.start, dest_len,
1947 iter, loc, /*is_store=*/true);
1948
1949 *iter = gsi_for_stmt (call);
1950 gsi_next (iter);
1951 iter_advanced_p = true;
1952 }
1953 else
1954 {
1955 if (src0.start != NULL_TREE)
1956 maybe_update_mem_ref_hash_table (src0.start, src0_len);
1957 if (src1.start != NULL_TREE)
1958 maybe_update_mem_ref_hash_table (src1.start, src1_len);
1959 if (dest.start != NULL_TREE)
1960 maybe_update_mem_ref_hash_table (dest.start, dest_len);
1961 }
1962 }
1963 return iter_advanced_p;
1964 }
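
/* Editorial note (illustrative only): for a call such as

     __builtin_memcpy (d, s, n);

   get_mem_refs_of_builtin_call reports a load region [s, s + n) and a
   store region [d, d + n).  When the run-time library intercepts the
   builtin, the checks happen there and only the local mem-ref hash table
   is updated; otherwise instrument_mem_region_access above emits a region
   check for the source and another for the destination before the
   call.  */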
1965
1966 /* Instrument the assignment statement pointed to by the iterator ITER
1967    if it is subject to instrumentation.  Return TRUE iff instrumentation
1968    actually happened.  In that case, the iterator ITER is advanced to the
1969    next logical expression following the one initially pointed to by ITER,
1970    and the relevant memory reference whose access has been instrumented
1971    is added to the memory references hash table.  */
1972
1973 static bool
1974 maybe_instrument_assignment (gimple_stmt_iterator *iter)
1975 {
1976 gimple *s = gsi_stmt (*iter);
1977
1978 gcc_assert (gimple_assign_single_p (s));
1979
1980 tree ref_expr = NULL_TREE;
1981 bool is_store, is_instrumented = false;
1982
1983 if (gimple_store_p (s))
1984 {
1985 ref_expr = gimple_assign_lhs (s);
1986 is_store = true;
1987 instrument_derefs (iter, ref_expr,
1988 gimple_location (s),
1989 is_store);
1990 is_instrumented = true;
1991 }
1992
1993 if (gimple_assign_load_p (s))
1994 {
1995 ref_expr = gimple_assign_rhs1 (s);
1996 is_store = false;
1997 instrument_derefs (iter, ref_expr,
1998 gimple_location (s),
1999 is_store);
2000 is_instrumented = true;
2001 }
2002
2003 if (is_instrumented)
2004 gsi_next (iter);
2005
2006 return is_instrumented;
2007 }
2008
2009 /* Instrument the function call pointed to by the iterator ITER, if it
2010 is subject to instrumentation. At the moment, the only function
2011 calls that are instrumented are some built-in functions that access
2012 memory. Look at instrument_builtin_call to learn more.
2013
2014 Upon completion return TRUE iff *ITER was advanced to the statement
2015 following the one it was originally pointing to. */
2016
2017 static bool
2018 maybe_instrument_call (gimple_stmt_iterator *iter)
2019 {
2020 gimple *stmt = gsi_stmt (*iter);
2021 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2022
2023 if (is_builtin && instrument_builtin_call (iter))
2024 return true;
2025
2026 if (gimple_call_noreturn_p (stmt))
2027 {
2028 if (is_builtin)
2029 {
2030 tree callee = gimple_call_fndecl (stmt);
2031 switch (DECL_FUNCTION_CODE (callee))
2032 {
2033 case BUILT_IN_UNREACHABLE:
2034 case BUILT_IN_TRAP:
2035 /* Don't instrument these. */
2036 return false;
2037 default:
2038 break;
2039 }
2040 }
2041 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2042 gimple *g = gimple_build_call (decl, 0);
2043 gimple_set_location (g, gimple_location (stmt));
2044 gsi_insert_before (iter, g, GSI_SAME_STMT);
2045 }
2046 return false;
2047 }
2048
2049 /* Walk each instruction of all basic blocks and instrument those that
2050 represent memory references: loads, stores, or function calls.
2051 In a given basic block, this function avoids instrumenting memory
2052 references that have already been instrumented. */
2053
2054 static void
2055 transform_statements (void)
2056 {
2057 basic_block bb, last_bb = NULL;
2058 gimple_stmt_iterator i;
2059 int saved_last_basic_block = last_basic_block_for_fn (cfun);
2060
2061 FOR_EACH_BB_FN (bb, cfun)
2062 {
2063 basic_block prev_bb = bb;
2064
2065 if (bb->index >= saved_last_basic_block) continue;
2066
2067 /* Flush the mem ref hash table, if the current bb doesn't have
2068    exactly one predecessor, or if that predecessor (skipping
2069    over asan-created basic blocks) isn't the last processed
2070    basic block.  Thus we effectively flush on extended basic
2071    block boundaries.  */
2072 while (single_pred_p (prev_bb))
2073 {
2074 prev_bb = single_pred (prev_bb);
2075 if (prev_bb->index < saved_last_basic_block)
2076 break;
2077 }
2078 if (prev_bb != last_bb)
2079 empty_mem_ref_hash_table ();
2080 last_bb = bb;
2081
2082 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2083 {
2084 gimple *s = gsi_stmt (i);
2085
2086 if (has_stmt_been_instrumented_p (s))
2087 gsi_next (&i);
2088 else if (gimple_assign_single_p (s)
2089 && !gimple_clobber_p (s)
2090 && maybe_instrument_assignment (&i))
2091 /* Nothing to do as maybe_instrument_assignment advanced
2092 the iterator I. */;
2093 else if (is_gimple_call (s) && maybe_instrument_call (&i))
2094 /* Nothing to do as maybe_instrument_call
2095 advanced the iterator I. */;
2096 else
2097 {
2098 /* No instrumentation happened.
2099
2100 If the current instruction is a function call that
2101 might free something, let's forget about the memory
2102 references that got instrumented. Otherwise we might
2103 miss some instrumentation opportunities. */
2104 if (is_gimple_call (s) && !nonfreeing_call_p (s))
2105 empty_mem_ref_hash_table ();
2106
2107 gsi_next (&i);
2108 }
2109 }
2110 }
2111 free_mem_ref_resources ();
2112 }
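
/* Editorial example (illustrative only): within one extended basic block
   the mem-ref hash table suppresses redundant checks, e.g. for

     *p = 1;      // store: instrumented, *p recorded in the hash table
     *p = 2;      // same 4-byte access: no second check is emitted
     free (q);    // possibly-freeing call: the table is flushed
     *p = 3;      // instrumented again

   and, as described above, the table is likewise flushed at extended
   basic block boundaries.  */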
2113
2114 /* Build
2115 __asan_before_dynamic_init (module_name)
2116 or
2117 __asan_after_dynamic_init ()
2118 call. */
2119
2120 tree
2121 asan_dynamic_init_call (bool after_p)
2122 {
2123 tree fn = builtin_decl_implicit (after_p
2124 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2125 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2126 tree module_name_cst = NULL_TREE;
2127 if (!after_p)
2128 {
2129 pretty_printer module_name_pp;
2130 pp_string (&module_name_pp, main_input_filename);
2131
2132 if (shadow_ptr_types[0] == NULL_TREE)
2133 asan_init_shadow_ptr_types ();
2134 module_name_cst = asan_pp_string (&module_name_pp);
2135 module_name_cst = fold_convert (const_ptr_type_node,
2136 module_name_cst);
2137 }
2138
2139 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2140 }
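
/* Editorial sketch (illustrative only): the calls built here bracket a
   module's dynamic initialization roughly as

     __asan_before_dynamic_init ("<main input filename>");
     ... run this module's dynamic initializers ...
     __asan_after_dynamic_init ();

   so the run-time library can detect initialization-order bugs between
   translation units.  */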
2141
2142 /* Build
2143 struct __asan_global
2144 {
2145 const void *__beg;
2146 uptr __size;
2147 uptr __size_with_redzone;
2148 const void *__name;
2149 const void *__module_name;
2150 uptr __has_dynamic_init;
2151 __asan_global_source_location *__location;
2152 } type. */
2153
2154 static tree
2155 asan_global_struct (void)
2156 {
2157 static const char *field_names[7]
2158 = { "__beg", "__size", "__size_with_redzone",
2159 "__name", "__module_name", "__has_dynamic_init", "__location"};
2160 tree fields[7], ret;
2161 int i;
2162
2163 ret = make_node (RECORD_TYPE);
2164 for (i = 0; i < 7; i++)
2165 {
2166 fields[i]
2167 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2168 get_identifier (field_names[i]),
2169 (i == 0 || i == 3) ? const_ptr_type_node
2170 : pointer_sized_int_node);
2171 DECL_CONTEXT (fields[i]) = ret;
2172 if (i)
2173 DECL_CHAIN (fields[i - 1]) = fields[i];
2174 }
2175 tree type_decl = build_decl (input_location, TYPE_DECL,
2176 get_identifier ("__asan_global"), ret);
2177 DECL_IGNORED_P (type_decl) = 1;
2178 DECL_ARTIFICIAL (type_decl) = 1;
2179 TYPE_FIELDS (ret) = fields[0];
2180 TYPE_NAME (ret) = type_decl;
2181 TYPE_STUB_DECL (ret) = type_decl;
2182 layout_type (ret);
2183 return ret;
2184 }
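
/* Editorial sketch (not part of the pass): a plain-C rendering of the
   record laid out above.  Fields 0 and 3 are const pointers; the
   remaining fields use the pointer-sized integer type, which holds the
   converted addresses of the module name string and of the optional
   __asan_global_source_location record.  */
typedef unsigned long sketch_uptr;  /* stands in for pointer_sized_int_node
                                       on an LP64 target (an assumption) */
struct sketch_asan_global
{
  const void *__beg;
  sketch_uptr __size;
  sketch_uptr __size_with_redzone;
  const void *__name;
  sketch_uptr __module_name;
  sketch_uptr __has_dynamic_init;
  sketch_uptr __location;
};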
2185
2186 /* Append description of a single global DECL into vector V.
2187 TYPE is __asan_global struct type as returned by asan_global_struct. */
2188
2189 static void
2190 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2191 {
2192 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2193 unsigned HOST_WIDE_INT size;
2194 tree str_cst, module_name_cst, refdecl = decl;
2195 vec<constructor_elt, va_gc> *vinner = NULL;
2196
2197 pretty_printer asan_pp, module_name_pp;
2198
2199 if (DECL_NAME (decl))
2200 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2201 else
2202 pp_string (&asan_pp, "<unknown>");
2203 str_cst = asan_pp_string (&asan_pp);
2204
2205 pp_string (&module_name_pp, main_input_filename);
2206 module_name_cst = asan_pp_string (&module_name_pp);
2207
2208 if (asan_needs_local_alias (decl))
2209 {
2210 char buf[20];
2211 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2212 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2213 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2214 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2215 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2216 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2217 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2218 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2219 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2220 TREE_STATIC (refdecl) = 1;
2221 TREE_PUBLIC (refdecl) = 0;
2222 TREE_USED (refdecl) = 1;
2223 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2224 }
2225
2226 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2227 fold_convert (const_ptr_type_node,
2228 build_fold_addr_expr (refdecl)));
2229 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2230 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2231 size += asan_red_zone_size (size);
2232 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2233 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2234 fold_convert (const_ptr_type_node, str_cst));
2235 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2236 fold_convert (const_ptr_type_node, module_name_cst));
2237 varpool_node *vnode = varpool_node::get (decl);
2238 int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2239 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2240 build_int_cst (uptr, has_dynamic_init));
2241 tree locptr = NULL_TREE;
2242 location_t loc = DECL_SOURCE_LOCATION (decl);
2243 expanded_location xloc = expand_location (loc);
2244 if (xloc.file != NULL)
2245 {
2246 static int lasanloccnt = 0;
2247 char buf[25];
2248 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2249 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2250 ubsan_get_source_location_type ());
2251 TREE_STATIC (var) = 1;
2252 TREE_PUBLIC (var) = 0;
2253 DECL_ARTIFICIAL (var) = 1;
2254 DECL_IGNORED_P (var) = 1;
2255 pretty_printer filename_pp;
2256 pp_string (&filename_pp, xloc.file);
2257 tree str = asan_pp_string (&filename_pp);
2258 tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2259 NULL_TREE, str, NULL_TREE,
2260 build_int_cst (unsigned_type_node,
2261 xloc.line), NULL_TREE,
2262 build_int_cst (unsigned_type_node,
2263 xloc.column));
2264 TREE_CONSTANT (ctor) = 1;
2265 TREE_STATIC (ctor) = 1;
2266 DECL_INITIAL (var) = ctor;
2267 varpool_node::finalize_decl (var);
2268 locptr = fold_convert (uptr, build_fold_addr_expr (var));
2269 }
2270 else
2271 locptr = build_int_cst (uptr, 0);
2272 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2273 init = build_constructor (type, vinner);
2274 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2275 }
2276
2277 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2278 void
2279 initialize_sanitizer_builtins (void)
2280 {
2281 tree decl;
2282
2283 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2284 return;
2285
2286 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2287 tree BT_FN_VOID_PTR
2288 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2289 tree BT_FN_VOID_CONST_PTR
2290 = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2291 tree BT_FN_VOID_PTR_PTR
2292 = build_function_type_list (void_type_node, ptr_type_node,
2293 ptr_type_node, NULL_TREE);
2294 tree BT_FN_VOID_PTR_PTR_PTR
2295 = build_function_type_list (void_type_node, ptr_type_node,
2296 ptr_type_node, ptr_type_node, NULL_TREE);
2297 tree BT_FN_VOID_PTR_PTRMODE
2298 = build_function_type_list (void_type_node, ptr_type_node,
2299 pointer_sized_int_node, NULL_TREE);
2300 tree BT_FN_VOID_INT
2301 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2302 tree BT_FN_SIZE_CONST_PTR_INT
2303 = build_function_type_list (size_type_node, const_ptr_type_node,
2304 integer_type_node, NULL_TREE);
2305 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2306 tree BT_FN_IX_CONST_VPTR_INT[5];
2307 tree BT_FN_IX_VPTR_IX_INT[5];
2308 tree BT_FN_VOID_VPTR_IX_INT[5];
2309 tree vptr
2310 = build_pointer_type (build_qualified_type (void_type_node,
2311 TYPE_QUAL_VOLATILE));
2312 tree cvptr
2313 = build_pointer_type (build_qualified_type (void_type_node,
2314 TYPE_QUAL_VOLATILE
2315 |TYPE_QUAL_CONST));
2316 tree boolt
2317 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2318 int i;
2319 for (i = 0; i < 5; i++)
2320 {
2321 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2322 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2323 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2324 integer_type_node, integer_type_node,
2325 NULL_TREE);
2326 BT_FN_IX_CONST_VPTR_INT[i]
2327 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2328 BT_FN_IX_VPTR_IX_INT[i]
2329 = build_function_type_list (ix, vptr, ix, integer_type_node,
2330 NULL_TREE);
2331 BT_FN_VOID_VPTR_IX_INT[i]
2332 = build_function_type_list (void_type_node, vptr, ix,
2333 integer_type_node, NULL_TREE);
2334 }
2335 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2336 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2337 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2338 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2339 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2340 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2341 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2342 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2343 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2344 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2345 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2346 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2347 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2348 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2349 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2350 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2351 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2352 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2353 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2354 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2355 #undef ATTR_NOTHROW_LEAF_LIST
2356 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2357 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2358 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2359 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2360 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2361 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2362 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2363 ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
2364 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2365 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2366 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2367 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2368 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2369 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2370 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2371 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2372 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2373 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2374 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2375 /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2376 #undef ATTR_PURE_NOTHROW_LEAF_LIST
2377 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
2378 #undef DEF_SANITIZER_BUILTIN
2379 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2380 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2381 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2382 set_call_expr_flags (decl, ATTRS); \
2383 set_builtin_decl (ENUM, decl, true);
2384
2385 #include "sanitizer.def"
2386
2387 /* -fsanitize=object-size uses __builtin_object_size, but that might
2388 not be available for e.g. Fortran at this point. We use
2389 DEF_SANITIZER_BUILTIN here only as a convenience macro. */
2390 if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
2391 && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
2392 DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
2393 BT_FN_SIZE_CONST_PTR_INT,
2394 ATTR_PURE_NOTHROW_LEAF_LIST)
2395
2396 #undef DEF_SANITIZER_BUILTIN
2397 }
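
/* Editorial example (illustrative only): each DEF_SANITIZER_BUILTIN use in
   sanitizer.def expands via the macro above.  A hypothetical entry

     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_INIT, "__asan_init",
                            BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST)

   would therefore produce roughly:

     decl = add_builtin_function ("__builtin___asan_init", BT_FN_VOID,
                                  BUILT_IN_ASAN_INIT, BUILT_IN_NORMAL,
                                  "__asan_init", NULL_TREE);
     set_call_expr_flags (decl, ECF_NOTHROW | ECF_LEAF);
     set_builtin_decl (BUILT_IN_ASAN_INIT, decl, true);
*/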
2398
2399 /* Called via hash_table::traverse.  Count the number of emitted
2400    STRING_CSTs in the constant hash table.  */
2401
2402 int
2403 count_string_csts (constant_descriptor_tree **slot,
2404 unsigned HOST_WIDE_INT *data)
2405 {
2406 struct constant_descriptor_tree *desc = *slot;
2407 if (TREE_CODE (desc->value) == STRING_CST
2408 && TREE_ASM_WRITTEN (desc->value)
2409 && asan_protect_global (desc->value))
2410 ++*data;
2411 return 1;
2412 }
2413
2414 /* Helper structure to pass two parameters to
2415 add_string_csts. */
2416
2417 struct asan_add_string_csts_data
2418 {
2419 tree type;
2420 vec<constructor_elt, va_gc> *v;
2421 };
2422
2423 /* Called via hash_table::traverse. Call asan_add_global
2424 on emitted STRING_CSTs from the constant hash table. */
2425
2426 int
2427 add_string_csts (constant_descriptor_tree **slot,
2428 asan_add_string_csts_data *aascd)
2429 {
2430 struct constant_descriptor_tree *desc = *slot;
2431 if (TREE_CODE (desc->value) == STRING_CST
2432 && TREE_ASM_WRITTEN (desc->value)
2433 && asan_protect_global (desc->value))
2434 {
2435 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2436 aascd->type, aascd->v);
2437 }
2438 return 1;
2439 }
2440
2441 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2442 invoke ggc_collect. */
2443 static GTY(()) tree asan_ctor_statements;
2444
2445 /* Module-level instrumentation.
2446 - Insert __asan_init_vN() into the list of CTORs.
2447 - TODO: insert redzones around globals.
2448 */
2449
2450 void
2451 asan_finish_file (void)
2452 {
2453 varpool_node *vnode;
2454 unsigned HOST_WIDE_INT gcount = 0;
2455
2456 if (shadow_ptr_types[0] == NULL_TREE)
2457 asan_init_shadow_ptr_types ();
2458 /* Avoid instrumenting code in the asan ctors/dtors.
2459 We don't need to insert padding after the description strings,
2460 nor after .LASAN* array. */
2461 flag_sanitize &= ~SANITIZE_ADDRESS;
2462
2463 /* For user-space we want asan constructors to run first.
2464    The Linux kernel does not support priorities other than the default,
2465    and there the only other user of constructors is coverage, so we run
2466    with the default priority.  */
2467 int priority = flag_sanitize & SANITIZE_USER_ADDRESS
2468 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
2469
2470 if (flag_sanitize & SANITIZE_USER_ADDRESS)
2471 {
2472 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2473 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2474 fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
2475 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2476 }
2477 FOR_EACH_DEFINED_VARIABLE (vnode)
2478 if (TREE_ASM_WRITTEN (vnode->decl)
2479 && asan_protect_global (vnode->decl))
2480 ++gcount;
2481 hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
2482 const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
2483 (&gcount);
2484 if (gcount)
2485 {
2486 tree type = asan_global_struct (), var, ctor;
2487 tree dtor_statements = NULL_TREE;
2488 vec<constructor_elt, va_gc> *v;
2489 char buf[20];
2490
2491 type = build_array_type_nelts (type, gcount);
2492 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2493 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2494 type);
2495 TREE_STATIC (var) = 1;
2496 TREE_PUBLIC (var) = 0;
2497 DECL_ARTIFICIAL (var) = 1;
2498 DECL_IGNORED_P (var) = 1;
2499 vec_alloc (v, gcount);
2500 FOR_EACH_DEFINED_VARIABLE (vnode)
2501 if (TREE_ASM_WRITTEN (vnode->decl)
2502 && asan_protect_global (vnode->decl))
2503 asan_add_global (vnode->decl, TREE_TYPE (type), v);
2504 struct asan_add_string_csts_data aascd;
2505 aascd.type = TREE_TYPE (type);
2506 aascd.v = v;
2507 const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
2508 (&aascd);
2509 ctor = build_constructor (type, v);
2510 TREE_CONSTANT (ctor) = 1;
2511 TREE_STATIC (ctor) = 1;
2512 DECL_INITIAL (var) = ctor;
2513 varpool_node::finalize_decl (var);
2514
2515 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2516 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
2517 append_to_statement_list (build_call_expr (fn, 2,
2518 build_fold_addr_expr (var),
2519 gcount_tree),
2520 &asan_ctor_statements);
2521
2522 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2523 append_to_statement_list (build_call_expr (fn, 2,
2524 build_fold_addr_expr (var),
2525 gcount_tree),
2526 &dtor_statements);
2527 cgraph_build_static_cdtor ('D', dtor_statements, priority);
2528 }
2529 if (asan_ctor_statements)
2530 cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
2531 flag_sanitize |= SANITIZE_ADDRESS;
2532 }
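
/* Editorial sketch (illustrative only): for user-space ASan the
   constructor and destructor built by asan_finish_file behave roughly
   like the following C.  ".LASAN0" is the descriptor array created above
   and <gcount> its element count; the function names are made up here,
   the real ones are chosen by cgraph_build_static_cdtor, and the exact
   version-mismatch-check symbol depends on the ASan API version.

     static void asan_module_ctor (void)
     {
       __asan_init ();
       __asan_version_mismatch_check_vN ();
       __asan_register_globals (&.LASAN0[0], <gcount>);
     }

     static void asan_module_dtor (void)
     {
       __asan_unregister_globals (&.LASAN0[0], <gcount>);
     }
*/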
2533
2534 /* Expand the IFN_ASAN_CHECK internal function pointed to by *ITER.  */
2535
2536 bool
2537 asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
2538 {
2539 gimple *g = gsi_stmt (*iter);
2540 location_t loc = gimple_location (g);
2541
2542 bool recover_p
2543 = (flag_sanitize & flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
2544
2545 HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
2546 gcc_assert (flags < ASAN_CHECK_LAST);
2547 bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
2548 bool is_store = (flags & ASAN_CHECK_STORE) != 0;
2549 bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
2550
2551 tree base = gimple_call_arg (g, 1);
2552 tree len = gimple_call_arg (g, 2);
2553 HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
2554
2555 HOST_WIDE_INT size_in_bytes
2556 = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2557
2558 if (use_calls)
2559 {
2560 /* Instrument using callbacks. */
2561 gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2562 NOP_EXPR, base);
2563 gimple_set_location (g, loc);
2564 gsi_insert_before (iter, g, GSI_SAME_STMT);
2565 tree base_addr = gimple_assign_lhs (g);
2566
2567 int nargs;
2568 tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
2569 if (nargs == 1)
2570 g = gimple_build_call (fun, 1, base_addr);
2571 else
2572 {
2573 gcc_assert (nargs == 2);
2574 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2575 NOP_EXPR, len);
2576 gimple_set_location (g, loc);
2577 gsi_insert_before (iter, g, GSI_SAME_STMT);
2578 tree sz_arg = gimple_assign_lhs (g);
2579 g = gimple_build_call (fun, nargs, base_addr, sz_arg);
2580 }
2581 gimple_set_location (g, loc);
2582 gsi_replace (iter, g, false);
2583 return false;
2584 }
2585
2586 HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
2587
2588 tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
2589 tree shadow_type = TREE_TYPE (shadow_ptr_type);
2590
2591 gimple_stmt_iterator gsi = *iter;
2592
2593 if (!is_non_zero_len)
2594 {
2595 /* So, the length of the memory area to asan-protect is
2596 non-constant. Let's guard the generated instrumentation code
2597 like:
2598
2599 if (len != 0)
2600 {
2601 //asan instrumentation code goes here.
2602 }
2603 // fallthrough instructions, starting with *ITER.  */
2604
2605 g = gimple_build_cond (NE_EXPR,
2606 len,
2607 build_int_cst (TREE_TYPE (len), 0),
2608 NULL_TREE, NULL_TREE);
2609 gimple_set_location (g, loc);
2610
2611 basic_block then_bb, fallthrough_bb;
2612 insert_if_then_before_iter (as_a <gcond *> (g), iter,
2613 /*then_more_likely_p=*/true,
2614 &then_bb, &fallthrough_bb);
2615 /* Note that fallthrough_bb starts with the statement that was
2616 pointed to by ITER. */
2617
2618 /* The 'then block' of the 'if (len != 0)' condition is where
2619 we'll generate the asan instrumentation code now. */
2620 gsi = gsi_last_bb (then_bb);
2621 }
2622
2623 /* Get an iterator on the point where we can add the condition
2624 statement for the instrumentation. */
2625 basic_block then_bb, else_bb;
2626 gsi = create_cond_insert_point (&gsi, /*before_p*/false,
2627 /*then_more_likely_p=*/false,
2628 /*create_then_fallthru_edge*/recover_p,
2629 &then_bb,
2630 &else_bb);
2631
2632 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2633 NOP_EXPR, base);
2634 gimple_set_location (g, loc);
2635 gsi_insert_before (&gsi, g, GSI_NEW_STMT);
2636 tree base_addr = gimple_assign_lhs (g);
2637
2638 tree t = NULL_TREE;
2639 if (real_size_in_bytes >= 8)
2640 {
2641 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2642 shadow_ptr_type);
2643 t = shadow;
2644 }
2645 else
2646 {
2647 /* Slow path for 1, 2 and 4 byte accesses. */
2648 /* Test (shadow != 0)
2649 & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow).  */
2650 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2651 shadow_ptr_type);
2652 gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
2653 gimple_seq seq = NULL;
2654 gimple_seq_add_stmt (&seq, shadow_test);
2655 /* Aligned accesses (>= 8 bytes) can test just
2656    (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
2657    to be 0.  */
2658 if (align < 8)
2659 {
2660 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2661 base_addr, 7));
2662 gimple_seq_add_stmt (&seq,
2663 build_type_cast (shadow_type,
2664 gimple_seq_last (seq)));
2665 if (real_size_in_bytes > 1)
2666 gimple_seq_add_stmt (&seq,
2667 build_assign (PLUS_EXPR,
2668 gimple_seq_last (seq),
2669 real_size_in_bytes - 1));
2670 t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
2671 }
2672 else
2673 t = build_int_cst (shadow_type, real_size_in_bytes - 1);
2674 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
2675 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2676 gimple_seq_last (seq)));
2677 t = gimple_assign_lhs (gimple_seq_last (seq));
2678 gimple_seq_set_location (seq, loc);
2679 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2680
2681 /* For non-constant, misaligned or otherwise weird access sizes,
2682 check first and last byte. */
2683 if (size_in_bytes == -1)
2684 {
2685 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2686 MINUS_EXPR, len,
2687 build_int_cst (pointer_sized_int_node, 1));
2688 gimple_set_location (g, loc);
2689 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2690 tree last = gimple_assign_lhs (g);
2691 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2692 PLUS_EXPR, base_addr, last);
2693 gimple_set_location (g, loc);
2694 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2695 tree base_end_addr = gimple_assign_lhs (g);
2696
2697 tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
2698 shadow_ptr_type);
2699 gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
2700 gimple_seq seq = NULL;
2701 gimple_seq_add_stmt (&seq, shadow_test);
2702 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2703 base_end_addr, 7));
2704 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
2705 gimple_seq_last (seq)));
2706 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
2707 gimple_seq_last (seq),
2708 shadow));
2709 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2710 gimple_seq_last (seq)));
2711 gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
2712 gimple_seq_last (seq)));
2713 t = gimple_assign_lhs (gimple_seq_last (seq));
2714 gimple_seq_set_location (seq, loc);
2715 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2716 }
2717 }
2718
2719 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
2720 NULL_TREE, NULL_TREE);
2721 gimple_set_location (g, loc);
2722 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2723
2724 /* Generate call to the run-time library (e.g. __asan_report_load8). */
2725 gsi = gsi_start_bb (then_bb);
2726 int nargs;
2727 tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
2728 g = gimple_build_call (fun, nargs, base_addr, len);
2729 gimple_set_location (g, loc);
2730 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2731
2732 gsi_remove (iter, true);
2733 *iter = gsi_start_bb (else_bb);
2734
2735 return true;
2736 }
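
/* Editorial sketch (not part of the pass): the inline slow-path test that
   asan_expand_check_ifn produces for a 1-, 2- or 4-byte access, written
   as plain C for a 4-byte load.  The shadow shift/offset are the same
   illustrative x86_64 user-space defaults used in the sketch after
   build_shadow_mem_access, and the prototype of the report routine is
   simplified.  Only meaningful in a process where the ASan run-time has
   mapped the shadow region.  */
extern void __asan_report_load4 (unsigned long addr);

static void
sketch_expanded_load4_check (const void *p)
{
  unsigned long addr = (unsigned long) p;
  signed char shadow = *(signed char *) ((addr >> 3) + 0x7fff8000UL);
  /* Shadow byte 0 means the whole 8-byte granule is addressable; a small
     positive value k means only the first k bytes are.  */
  signed char last = (signed char) (addr & 7) + (4 - 1);
  if (shadow != 0 && last >= shadow)
    __asan_report_load4 (addr);
}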
2737
2738 /* Instrument the current function. */
2739
2740 static unsigned int
2741 asan_instrument (void)
2742 {
2743 if (shadow_ptr_types[0] == NULL_TREE)
2744 asan_init_shadow_ptr_types ();
2745 transform_statements ();
2746 return 0;
2747 }
2748
2749 static bool
2750 gate_asan (void)
2751 {
2752 return (flag_sanitize & SANITIZE_ADDRESS) != 0
2753 && !lookup_attribute ("no_sanitize_address",
2754 DECL_ATTRIBUTES (current_function_decl));
2755 }
2756
2757 namespace {
2758
2759 const pass_data pass_data_asan =
2760 {
2761 GIMPLE_PASS, /* type */
2762 "asan", /* name */
2763 OPTGROUP_NONE, /* optinfo_flags */
2764 TV_NONE, /* tv_id */
2765 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2766 0, /* properties_provided */
2767 0, /* properties_destroyed */
2768 0, /* todo_flags_start */
2769 TODO_update_ssa, /* todo_flags_finish */
2770 };
2771
2772 class pass_asan : public gimple_opt_pass
2773 {
2774 public:
2775 pass_asan (gcc::context *ctxt)
2776 : gimple_opt_pass (pass_data_asan, ctxt)
2777 {}
2778
2779 /* opt_pass methods: */
2780 opt_pass * clone () { return new pass_asan (m_ctxt); }
2781 virtual bool gate (function *) { return gate_asan (); }
2782 virtual unsigned int execute (function *) { return asan_instrument (); }
2783
2784 }; // class pass_asan
2785
2786 } // anon namespace
2787
2788 gimple_opt_pass *
2789 make_pass_asan (gcc::context *ctxt)
2790 {
2791 return new pass_asan (ctxt);
2792 }
2793
2794 namespace {
2795
2796 const pass_data pass_data_asan_O0 =
2797 {
2798 GIMPLE_PASS, /* type */
2799 "asan0", /* name */
2800 OPTGROUP_NONE, /* optinfo_flags */
2801 TV_NONE, /* tv_id */
2802 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2803 0, /* properties_provided */
2804 0, /* properties_destroyed */
2805 0, /* todo_flags_start */
2806 TODO_update_ssa, /* todo_flags_finish */
2807 };
2808
2809 class pass_asan_O0 : public gimple_opt_pass
2810 {
2811 public:
2812 pass_asan_O0 (gcc::context *ctxt)
2813 : gimple_opt_pass (pass_data_asan_O0, ctxt)
2814 {}
2815
2816 /* opt_pass methods: */
2817 virtual bool gate (function *) { return !optimize && gate_asan (); }
2818 virtual unsigned int execute (function *) { return asan_instrument (); }
2819
2820 }; // class pass_asan_O0
2821
2822 } // anon namespace
2823
2824 gimple_opt_pass *
2825 make_pass_asan_O0 (gcc::context *ctxt)
2826 {
2827 return new pass_asan_O0 (ctxt);
2828 }
2829
2830 #include "gt-asan.h"