[gcc.git] / gcc / asan.c
1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2015 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "alias.h"
26 #include "backend.h"
27 #include "cfghooks.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "rtl.h"
31 #include "options.h"
32 #include "fold-const.h"
33 #include "cfganal.h"
34 #include "internal-fn.h"
35 #include "gimplify.h"
36 #include "gimple-iterator.h"
37 #include "calls.h"
38 #include "varasm.h"
39 #include "stor-layout.h"
40 #include "tree-iterator.h"
41 #include "cgraph.h"
42 #include "stringpool.h"
43 #include "tree-ssanames.h"
44 #include "tree-pass.h"
45 #include "asan.h"
46 #include "gimple-pretty-print.h"
47 #include "target.h"
48 #include "flags.h"
49 #include "insn-config.h"
50 #include "expmed.h"
51 #include "dojump.h"
52 #include "explow.h"
53 #include "emit-rtl.h"
54 #include "stmt.h"
55 #include "expr.h"
56 #include "insn-codes.h"
57 #include "optabs.h"
58 #include "output.h"
59 #include "tm_p.h"
60 #include "langhooks.h"
61 #include "alloc-pool.h"
62 #include "cfgloop.h"
63 #include "gimple-builder.h"
64 #include "ubsan.h"
65 #include "params.h"
66 #include "builtins.h"
67 #include "fnmatch.h"
68
69 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
70 with <2x slowdown on average.
71
72 The tool consists of two parts:
73 instrumentation module (this file) and a run-time library.
74 The instrumentation module adds a run-time check before every memory insn.
75   For an 8- or 16-byte load accessing address X:
76 ShadowAddr = (X >> 3) + Offset
77 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
78 if (ShadowValue)
79 __asan_report_load8(X);
80 For a load of N bytes (N=1, 2 or 4) from address X:
81 ShadowAddr = (X >> 3) + Offset
82 ShadowValue = *(char*)ShadowAddr;
83 if (ShadowValue)
84 if ((X & 7) + N - 1 > ShadowValue)
85 __asan_report_loadN(X);
86 Stores are instrumented similarly, but using __asan_report_storeN functions.
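
   As a worked illustration (assuming the default x86_64 shadow offset of
   0x7fff8000; the actual value comes from targetm.asan_shadow_offset or
   from set_asan_shadow_offset below), a 4-byte load from X = 0x7fffffffe010
   is checked as:
     ShadowAddr  = (0x7fffffffe010 >> 3) + 0x7fff8000 = 0x10007fff7c02
     ShadowValue = *(char*)0x10007fff7c02;
   If ShadowValue is 0, the whole 8-byte granule is addressable and the load
   proceeds; if ShadowValue is, say, 4, only the first 4 bytes of the granule
   are addressable, and the test (X & 7) + 4 - 1 > ShadowValue decides
   whether __asan_report_load4 (X) is called.
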
87   A call to __asan_init_vN() is inserted into the list of module CTORs.
88 N is the version number of the AddressSanitizer API. The changes between the
89 API versions are listed in libsanitizer/asan/asan_interface_internal.h.
90
91   The run-time library redefines malloc (so that redzones are inserted around
92   the allocated memory) and free (so that reuse of freed memory is delayed),
93   and provides the __asan_report* and __asan_init_vN functions.
94
95 Read more:
96 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
97
98 The current implementation supports detection of out-of-bounds and
99 use-after-free in the heap, on the stack and for global variables.
100
101 [Protection of stack variables]
102
103 To understand how detection of out-of-bounds and use-after-free works
104   for stack variables, let's look at this example on x86_64 where the
105 stack grows downward:
106
107 int
108 foo ()
109 {
110 char a[23] = {0};
111 int b[2] = {0};
112
113 a[5] = 1;
114 b[1] = 2;
115
116 return a[5] + b[1];
117 }
118
119 For this function, the stack protected by asan will be organized as
120 follows, from the top of the stack to the bottom:
121
122 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
123
124   Slot 2/ [8 bytes of red zone, which pad the space of 'a' so that
125            the next slot is 32-byte aligned; this one is called the Partial
126            Redzone; this 32-byte alignment is an asan constraint]
127
128 Slot 3/ [24 bytes for variable 'a']
129
130 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
131
132   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
133
134 Slot 6/ [8 bytes for variable 'b']
135
136 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
137 'LEFT RedZone']
138
139 The 32 bytes of LEFT red zone at the bottom of the stack can be
140   decomposed as follows:
141
142 1/ The first 8 bytes contain a magical asan number that is always
143 0x41B58AB3.
144
145   2/ The following 8 bytes contain a pointer to a string (to be
146   parsed at run time by the asan runtime library), whose format is
147   the following:
148
149 "<function-name> <space> <num-of-variables-on-the-stack>
150 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
151 <length-of-var-in-bytes> ){n} "
152
153 where '(...){n}' means the content inside the parenthesis occurs 'n'
154 times, with 'n' being the number of variables on the stack.
155
156   3/ The following 8 bytes contain the PC of the current function, which
157 will be used by the run-time library to print an error message.
158
159 4/ The following 8 bytes are reserved for internal use by the run-time.
160
161 The shadow memory for that stack layout is going to look like this:
162
163 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
164 The F1 byte pattern is a magic number called
165 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
166     the memory for that shadow byte is part of the LEFT red zone
167     intended to sit at the bottom of the variables on the stack.
168
169 - content of shadow memory 8 bytes for slots 6 and 5:
170 0xF4F4F400. The F4 byte pattern is a magic number
171 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
172 memory region for this shadow byte is a PARTIAL red zone
173 intended to pad a variable A, so that the slot following
174     {A,padding} is 32-byte aligned.
175
176 Note that the fact that the least significant byte of this
177 shadow memory content is 00 means that 8 bytes of its
178 corresponding memory (which corresponds to the memory of
179     variable 'b') are addressable.
180
181 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
182 The F2 byte pattern is a magic number called
183 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
184 region for this shadow byte is a MIDDLE red zone intended to
185     sit between two 32-byte aligned slots of {variable,padding}.
186
187 - content of shadow memory 8 bytes for slot 3 and 2:
188     0xF4000000.  This represents the concatenation of
189     variable 'a' and the partial red zone following it, like what we
190     had for variable 'b'.  The least significant 3 bytes being 00
191     means that the 24 bytes holding variable 'a' are addressable.
192
193 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
194 The F3 byte pattern is a magic number called
195 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
196     region for this shadow byte is a RIGHT red zone intended to sit
197     at the top of the variables on the stack.
198
199 Note that the real variable layout is done in expand_used_vars in
200 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
201 stack variables as well as the different red zones, emits some
202   prologue code to populate the shadow memory so as to poison (mark as
203   non-accessible) the regions of the red zones and mark the regions of
204   stack variables as accessible, and emits some epilogue code to
205 un-poison (mark as accessible) the regions of red zones right before
206 the function exits.
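
   As a usage-level illustration (a stand-alone test program, not part of
   GCC itself), compiling the following with -fsanitize=address -g makes
   the run-time library report a stack-buffer-overflow as soon as the
   partial red zone right after 'a' is read:

     int
     main (void)
     {
       char a[23] = {0};
       return a[23];   // one byte past the end: lands in the red zone
     }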
207
208 [Protection of global variables]
209
210 The basic idea is to insert a red zone between two global variables
211   and install a constructor function that calls the asan runtime to
212   populate the relevant shadow memory regions at load time.
213
214   So the global variables are laid out so as to insert a red zone between
215   them.  The red zones are sized so that each variable starts on a
216   32-byte boundary.
217
218 Then a constructor function is installed so that, for each global
219 variable, it calls the runtime asan library function
220   __asan_register_globals with an instance of this type:
221
222 struct __asan_global
223 {
224 // Address of the beginning of the global variable.
225 const void *__beg;
226
227 // Initial size of the global variable.
228 uptr __size;
229
230 // Size of the global variable + size of the red zone. This
231 // size is 32 bytes aligned.
232 uptr __size_with_redzone;
233
234 // Name of the global variable.
235 const void *__name;
236
237 // Name of the module where the global variable is declared.
238 const void *__module_name;
239
240 // 1 if it has dynamic initialization, 0 otherwise.
241 uptr __has_dynamic_init;
242
243 // A pointer to struct that contains source location, could be NULL.
244 __asan_global_source_location *__location;
245 }
246
247 A destructor function that calls the runtime asan library function
248   __asan_unregister_globals is also installed.  */
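
/* For illustration only -- not code GCC emits verbatim, and the names
   __asan_globals/__asan_ctor/__asan_dtor below are hypothetical -- the
   constructor/destructor pair registered for a translation unit with two
   protected globals conceptually amounts to:

     static struct __asan_global __asan_globals[2];  -- the two descriptors

     static void __asan_ctor (void)
     {
       __asan_init_vN ();
       __asan_register_globals (__asan_globals, 2);
     }

     static void __asan_dtor (void)
     {
       __asan_unregister_globals (__asan_globals, 2);
     }

   with __asan_ctor added to the module's constructor list and __asan_dtor
   to its destructor list.  */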
249
250 static unsigned HOST_WIDE_INT asan_shadow_offset_value;
251 static bool asan_shadow_offset_computed;
252 static vec<char *> sanitized_sections;
253
254 /* Sets shadow offset to value in string VAL. */
255
256 bool
257 set_asan_shadow_offset (const char *val)
258 {
259 char *endp;
260
261 errno = 0;
262 #ifdef HAVE_LONG_LONG
263 asan_shadow_offset_value = strtoull (val, &endp, 0);
264 #else
265 asan_shadow_offset_value = strtoul (val, &endp, 0);
266 #endif
267 if (!(*val != '\0' && *endp == '\0' && errno == 0))
268 return false;
269
270 asan_shadow_offset_computed = true;
271
272 return true;
273 }
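
/* For example (an illustrative value, not a recommendation), a command-line
   option such as -fasan-shadow-offset=0x7fff8000 ends up here with
   VAL == "0x7fff8000", and the parsed value then overrides the target's
   default returned by targetm.asan_shadow_offset.  */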
274
275 /* Set list of user-defined sections that need to be sanitized. */
276
277 void
278 set_sanitized_sections (const char *sections)
279 {
280 char *pat;
281 unsigned i;
282 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
283 free (pat);
284 sanitized_sections.truncate (0);
285
286 for (const char *s = sections; *s; )
287 {
288 const char *end;
289 for (end = s; *end && *end != ','; ++end);
290 size_t len = end - s;
291 sanitized_sections.safe_push (xstrndup (s, len));
292 s = *end ? end + 1 : end;
293 }
294 }
295
296 /* Checks whether section SEC should be sanitized. */
297
298 static bool
299 section_sanitized_p (const char *sec)
300 {
301 char *pat;
302 unsigned i;
303 FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
304 if (fnmatch (pat, sec, FNM_PERIOD) == 0)
305 return true;
306 return false;
307 }
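
/* A stand-alone sketch (not part of GCC; the patterns and section names are
   just examples) of how a comma-separated pattern list is matched the same
   way section_sanitized_p does it, using fnmatch with FNM_PERIOD:

     #include <fnmatch.h>
     #include <stdio.h>

     int
     main (void)
     {
       const char *pats[] = { ".data.foo", ".bss.*" };
       const char *secs[] = { ".data.foo", ".bss.buf", ".rodata" };
       for (int i = 0; i < 3; i++)
	 {
	   int hit = 0;
	   for (int j = 0; j < 2; j++)
	     if (fnmatch (pats[j], secs[i], FNM_PERIOD) == 0)
	       hit = 1;
	   printf ("%s -> %s\n", secs[i], hit ? "sanitized" : "skipped");
	 }
       return 0;
     }

   which prints "sanitized" for the first two section names only.  */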
308
309 /* Returns Asan shadow offset. */
310
311 static unsigned HOST_WIDE_INT
312 asan_shadow_offset ()
313 {
314 if (!asan_shadow_offset_computed)
315 {
316 asan_shadow_offset_computed = true;
317 asan_shadow_offset_value = targetm.asan_shadow_offset ();
318 }
319 return asan_shadow_offset_value;
320 }
321
322 alias_set_type asan_shadow_set = -1;
323
324 /* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
325 alias set is used for all shadow memory accesses. */
326 static GTY(()) tree shadow_ptr_types[2];
327
328 /* Decl for __asan_option_detect_stack_use_after_return. */
329 static GTY(()) tree asan_detect_stack_use_after_return;
330
331 /* Various flags for Asan builtins. */
332 enum asan_check_flags
333 {
334 ASAN_CHECK_STORE = 1 << 0,
335 ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
336 ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
337 ASAN_CHECK_LAST = 1 << 3
338 };
339
340 /* Hashtable support for memory references used by gimple
341 statements. */
342
343 /* This type represents a reference to a memory region. */
344 struct asan_mem_ref
345 {
346 /* The expression of the beginning of the memory region. */
347 tree start;
348
349 /* The size of the access. */
350 HOST_WIDE_INT access_size;
351
352 /* Pool allocation new operator. */
353 inline void *operator new (size_t)
354 {
355 return pool.allocate ();
356 }
357
358 /* Delete operator utilizing pool allocation. */
359 inline void operator delete (void *ptr)
360 {
361 pool.remove ((asan_mem_ref *) ptr);
362 }
363
364 /* Memory allocation pool. */
365 static pool_allocator<asan_mem_ref> pool;
366 };
367
368 pool_allocator<asan_mem_ref> asan_mem_ref::pool ("asan_mem_ref", 10);
369
370 /* Initializes an instance of asan_mem_ref. */
371
372 static void
373 asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
374 {
375 ref->start = start;
376 ref->access_size = access_size;
377 }
378
379 /* Allocate memory for an instance of asan_mem_ref from the
380    asan_mem_ref::pool memory pool and initialize it.
381 START is the address of (or the expression pointing to) the
382 beginning of memory reference. ACCESS_SIZE is the size of the
383 access to the referenced memory. */
384
385 static asan_mem_ref*
386 asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
387 {
388 asan_mem_ref *ref = new asan_mem_ref;
389
390 asan_mem_ref_init (ref, start, access_size);
391 return ref;
392 }
393
394 /* This builds and returns a pointer to the end of the memory region
395    that starts at START and has length LEN.  */
396
397 tree
398 asan_mem_ref_get_end (tree start, tree len)
399 {
400 if (len == NULL_TREE || integer_zerop (len))
401 return start;
402
403 if (!ptrofftype_p (len))
404 len = convert_to_ptrofftype (len);
405
406 return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
407 }
408
409 /* Return a tree expression that represents the end of the referenced
410 memory region. Beware that this function can actually build a new
411 tree expression. */
412
413 tree
414 asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
415 {
416 return asan_mem_ref_get_end (ref->start, len);
417 }
418
419 struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
420 {
421 static inline hashval_t hash (const asan_mem_ref *);
422 static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
423 };
424
425 /* Hash a memory reference. */
426
427 inline hashval_t
428 asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
429 {
430 return iterative_hash_expr (mem_ref->start, 0);
431 }
432
433 /* Compare two memory references.  Only their start expressions are
434    compared, so references to the same location with different access
435    sizes compare equal.  */
435
436 inline bool
437 asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
438 const asan_mem_ref *m2)
439 {
440 return operand_equal_p (m1->start, m2->start, 0);
441 }
442
443 static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
444
445 /* Returns a reference to the hash table containing memory references.
446 This function ensures that the hash table is created. Note that
447 this hash table is updated by the function
448 update_mem_ref_hash_table. */
449
450 static hash_table<asan_mem_ref_hasher> *
451 get_mem_ref_hash_table ()
452 {
453 if (!asan_mem_ref_ht)
454 asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
455
456 return asan_mem_ref_ht;
457 }
458
459 /* Clear all entries from the memory references hash table. */
460
461 static void
462 empty_mem_ref_hash_table ()
463 {
464 if (asan_mem_ref_ht)
465 asan_mem_ref_ht->empty ();
466 }
467
468 /* Free the memory references hash table. */
469
470 static void
471 free_mem_ref_resources ()
472 {
473 delete asan_mem_ref_ht;
474 asan_mem_ref_ht = NULL;
475
476 asan_mem_ref::pool.release ();
477 }
478
479 /* Return true iff the memory reference REF has been instrumented. */
480
481 static bool
482 has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
483 {
484 asan_mem_ref r;
485 asan_mem_ref_init (&r, ref, access_size);
486
487 asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
488 return saved_ref && saved_ref->access_size >= access_size;
489 }
490
491 /* Return true iff the memory reference REF has been instrumented. */
492
493 static bool
494 has_mem_ref_been_instrumented (const asan_mem_ref *ref)
495 {
496 return has_mem_ref_been_instrumented (ref->start, ref->access_size);
497 }
498
499 /* Return true iff access to memory region starting at REF and of
500 length LEN has been instrumented. */
501
502 static bool
503 has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
504 {
505 HOST_WIDE_INT size_in_bytes
506 = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
507
508 return size_in_bytes != -1
509 && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
510 }
511
512 /* Set REF to the memory reference present in a gimple assignment
513 ASSIGNMENT. Return true upon successful completion, false
514 otherwise. */
515
516 static bool
517 get_mem_ref_of_assignment (const gassign *assignment,
518 asan_mem_ref *ref,
519 bool *ref_is_store)
520 {
521 gcc_assert (gimple_assign_single_p (assignment));
522
523 if (gimple_store_p (assignment)
524 && !gimple_clobber_p (assignment))
525 {
526 ref->start = gimple_assign_lhs (assignment);
527 *ref_is_store = true;
528 }
529 else if (gimple_assign_load_p (assignment))
530 {
531 ref->start = gimple_assign_rhs1 (assignment);
532 *ref_is_store = false;
533 }
534 else
535 return false;
536
537 ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
538 return true;
539 }
540
541 /* Return the memory references contained in a gimple statement
542 representing a builtin call that has to do with memory access. */
543
544 static bool
545 get_mem_refs_of_builtin_call (const gcall *call,
546 asan_mem_ref *src0,
547 tree *src0_len,
548 bool *src0_is_store,
549 asan_mem_ref *src1,
550 tree *src1_len,
551 bool *src1_is_store,
552 asan_mem_ref *dst,
553 tree *dst_len,
554 bool *dst_is_store,
555 bool *dest_is_deref,
556 bool *intercepted_p)
557 {
558 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
559
560 tree callee = gimple_call_fndecl (call);
561 tree source0 = NULL_TREE, source1 = NULL_TREE,
562 dest = NULL_TREE, len = NULL_TREE;
563 bool is_store = true, got_reference_p = false;
564 HOST_WIDE_INT access_size = 1;
565
566 *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));
567
568 switch (DECL_FUNCTION_CODE (callee))
569 {
570 /* (s, s, n) style memops. */
571 case BUILT_IN_BCMP:
572 case BUILT_IN_MEMCMP:
573 source0 = gimple_call_arg (call, 0);
574 source1 = gimple_call_arg (call, 1);
575 len = gimple_call_arg (call, 2);
576 break;
577
578 /* (src, dest, n) style memops. */
579 case BUILT_IN_BCOPY:
580 source0 = gimple_call_arg (call, 0);
581 dest = gimple_call_arg (call, 1);
582 len = gimple_call_arg (call, 2);
583 break;
584
585 /* (dest, src, n) style memops. */
586 case BUILT_IN_MEMCPY:
587 case BUILT_IN_MEMCPY_CHK:
588 case BUILT_IN_MEMMOVE:
589 case BUILT_IN_MEMMOVE_CHK:
590 case BUILT_IN_MEMPCPY:
591 case BUILT_IN_MEMPCPY_CHK:
592 dest = gimple_call_arg (call, 0);
593 source0 = gimple_call_arg (call, 1);
594 len = gimple_call_arg (call, 2);
595 break;
596
597 /* (dest, n) style memops. */
598 case BUILT_IN_BZERO:
599 dest = gimple_call_arg (call, 0);
600 len = gimple_call_arg (call, 1);
601 break;
602
603     /* (dest, x, n) style memops.  */
604 case BUILT_IN_MEMSET:
605 case BUILT_IN_MEMSET_CHK:
606 dest = gimple_call_arg (call, 0);
607 len = gimple_call_arg (call, 2);
608 break;
609
610 case BUILT_IN_STRLEN:
611 source0 = gimple_call_arg (call, 0);
612 len = gimple_call_lhs (call);
613       break;
614
615 /* And now the __atomic* and __sync builtins.
616        These are handled differently from the classical memory
617 access builtins above. */
618
619 case BUILT_IN_ATOMIC_LOAD_1:
620 case BUILT_IN_ATOMIC_LOAD_2:
621 case BUILT_IN_ATOMIC_LOAD_4:
622 case BUILT_IN_ATOMIC_LOAD_8:
623 case BUILT_IN_ATOMIC_LOAD_16:
624 is_store = false;
625 /* fall through. */
626
627 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
628 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
629 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
630 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
631 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
632
633 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
634 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
635 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
636 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
637 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
638
639 case BUILT_IN_SYNC_FETCH_AND_OR_1:
640 case BUILT_IN_SYNC_FETCH_AND_OR_2:
641 case BUILT_IN_SYNC_FETCH_AND_OR_4:
642 case BUILT_IN_SYNC_FETCH_AND_OR_8:
643 case BUILT_IN_SYNC_FETCH_AND_OR_16:
644
645 case BUILT_IN_SYNC_FETCH_AND_AND_1:
646 case BUILT_IN_SYNC_FETCH_AND_AND_2:
647 case BUILT_IN_SYNC_FETCH_AND_AND_4:
648 case BUILT_IN_SYNC_FETCH_AND_AND_8:
649 case BUILT_IN_SYNC_FETCH_AND_AND_16:
650
651 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
652 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
653 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
654 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
655 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
656
657 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
658 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
659 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
660 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
661
662 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
663 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
664 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
665 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
666 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
667
668 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
669 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
670 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
671 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
672 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
673
674 case BUILT_IN_SYNC_OR_AND_FETCH_1:
675 case BUILT_IN_SYNC_OR_AND_FETCH_2:
676 case BUILT_IN_SYNC_OR_AND_FETCH_4:
677 case BUILT_IN_SYNC_OR_AND_FETCH_8:
678 case BUILT_IN_SYNC_OR_AND_FETCH_16:
679
680 case BUILT_IN_SYNC_AND_AND_FETCH_1:
681 case BUILT_IN_SYNC_AND_AND_FETCH_2:
682 case BUILT_IN_SYNC_AND_AND_FETCH_4:
683 case BUILT_IN_SYNC_AND_AND_FETCH_8:
684 case BUILT_IN_SYNC_AND_AND_FETCH_16:
685
686 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
687 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
688 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
689 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
690 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
691
692 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
693 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
694 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
695 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
696
697 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
698 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
699 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
700 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
701 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
702
703 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
704 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
705 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
706 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
707 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
708
709 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
710 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
711 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
712 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
713 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
714
715 case BUILT_IN_SYNC_LOCK_RELEASE_1:
716 case BUILT_IN_SYNC_LOCK_RELEASE_2:
717 case BUILT_IN_SYNC_LOCK_RELEASE_4:
718 case BUILT_IN_SYNC_LOCK_RELEASE_8:
719 case BUILT_IN_SYNC_LOCK_RELEASE_16:
720
721 case BUILT_IN_ATOMIC_EXCHANGE_1:
722 case BUILT_IN_ATOMIC_EXCHANGE_2:
723 case BUILT_IN_ATOMIC_EXCHANGE_4:
724 case BUILT_IN_ATOMIC_EXCHANGE_8:
725 case BUILT_IN_ATOMIC_EXCHANGE_16:
726
727 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
728 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
729 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
730 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
731 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
732
733 case BUILT_IN_ATOMIC_STORE_1:
734 case BUILT_IN_ATOMIC_STORE_2:
735 case BUILT_IN_ATOMIC_STORE_4:
736 case BUILT_IN_ATOMIC_STORE_8:
737 case BUILT_IN_ATOMIC_STORE_16:
738
739 case BUILT_IN_ATOMIC_ADD_FETCH_1:
740 case BUILT_IN_ATOMIC_ADD_FETCH_2:
741 case BUILT_IN_ATOMIC_ADD_FETCH_4:
742 case BUILT_IN_ATOMIC_ADD_FETCH_8:
743 case BUILT_IN_ATOMIC_ADD_FETCH_16:
744
745 case BUILT_IN_ATOMIC_SUB_FETCH_1:
746 case BUILT_IN_ATOMIC_SUB_FETCH_2:
747 case BUILT_IN_ATOMIC_SUB_FETCH_4:
748 case BUILT_IN_ATOMIC_SUB_FETCH_8:
749 case BUILT_IN_ATOMIC_SUB_FETCH_16:
750
751 case BUILT_IN_ATOMIC_AND_FETCH_1:
752 case BUILT_IN_ATOMIC_AND_FETCH_2:
753 case BUILT_IN_ATOMIC_AND_FETCH_4:
754 case BUILT_IN_ATOMIC_AND_FETCH_8:
755 case BUILT_IN_ATOMIC_AND_FETCH_16:
756
757 case BUILT_IN_ATOMIC_NAND_FETCH_1:
758 case BUILT_IN_ATOMIC_NAND_FETCH_2:
759 case BUILT_IN_ATOMIC_NAND_FETCH_4:
760 case BUILT_IN_ATOMIC_NAND_FETCH_8:
761 case BUILT_IN_ATOMIC_NAND_FETCH_16:
762
763 case BUILT_IN_ATOMIC_XOR_FETCH_1:
764 case BUILT_IN_ATOMIC_XOR_FETCH_2:
765 case BUILT_IN_ATOMIC_XOR_FETCH_4:
766 case BUILT_IN_ATOMIC_XOR_FETCH_8:
767 case BUILT_IN_ATOMIC_XOR_FETCH_16:
768
769 case BUILT_IN_ATOMIC_OR_FETCH_1:
770 case BUILT_IN_ATOMIC_OR_FETCH_2:
771 case BUILT_IN_ATOMIC_OR_FETCH_4:
772 case BUILT_IN_ATOMIC_OR_FETCH_8:
773 case BUILT_IN_ATOMIC_OR_FETCH_16:
774
775 case BUILT_IN_ATOMIC_FETCH_ADD_1:
776 case BUILT_IN_ATOMIC_FETCH_ADD_2:
777 case BUILT_IN_ATOMIC_FETCH_ADD_4:
778 case BUILT_IN_ATOMIC_FETCH_ADD_8:
779 case BUILT_IN_ATOMIC_FETCH_ADD_16:
780
781 case BUILT_IN_ATOMIC_FETCH_SUB_1:
782 case BUILT_IN_ATOMIC_FETCH_SUB_2:
783 case BUILT_IN_ATOMIC_FETCH_SUB_4:
784 case BUILT_IN_ATOMIC_FETCH_SUB_8:
785 case BUILT_IN_ATOMIC_FETCH_SUB_16:
786
787 case BUILT_IN_ATOMIC_FETCH_AND_1:
788 case BUILT_IN_ATOMIC_FETCH_AND_2:
789 case BUILT_IN_ATOMIC_FETCH_AND_4:
790 case BUILT_IN_ATOMIC_FETCH_AND_8:
791 case BUILT_IN_ATOMIC_FETCH_AND_16:
792
793 case BUILT_IN_ATOMIC_FETCH_NAND_1:
794 case BUILT_IN_ATOMIC_FETCH_NAND_2:
795 case BUILT_IN_ATOMIC_FETCH_NAND_4:
796 case BUILT_IN_ATOMIC_FETCH_NAND_8:
797 case BUILT_IN_ATOMIC_FETCH_NAND_16:
798
799 case BUILT_IN_ATOMIC_FETCH_XOR_1:
800 case BUILT_IN_ATOMIC_FETCH_XOR_2:
801 case BUILT_IN_ATOMIC_FETCH_XOR_4:
802 case BUILT_IN_ATOMIC_FETCH_XOR_8:
803 case BUILT_IN_ATOMIC_FETCH_XOR_16:
804
805 case BUILT_IN_ATOMIC_FETCH_OR_1:
806 case BUILT_IN_ATOMIC_FETCH_OR_2:
807 case BUILT_IN_ATOMIC_FETCH_OR_4:
808 case BUILT_IN_ATOMIC_FETCH_OR_8:
809 case BUILT_IN_ATOMIC_FETCH_OR_16:
810 {
811 dest = gimple_call_arg (call, 0);
812 /* DEST represents the address of a memory location.
813 	   instrument_derefs wants the memory location, so let's
814 dereference the address DEST before handing it to
815 instrument_derefs. */
816 if (TREE_CODE (dest) == ADDR_EXPR)
817 dest = TREE_OPERAND (dest, 0);
818 else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
819 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
820 dest, build_int_cst (TREE_TYPE (dest), 0));
821 else
822 gcc_unreachable ();
823
824 access_size = int_size_in_bytes (TREE_TYPE (dest));
825 }
826
827 default:
828     /* The memory accesses of the other builtins are not instrumented in
829        this function because they either don't have a length parameter,
830        or their length parameter is just a limit.  */
831 break;
832 }
833
834 if (len != NULL_TREE)
835 {
836 if (source0 != NULL_TREE)
837 {
838 src0->start = source0;
839 src0->access_size = access_size;
840 *src0_len = len;
841 *src0_is_store = false;
842 }
843
844 if (source1 != NULL_TREE)
845 {
846 src1->start = source1;
847 src1->access_size = access_size;
848 *src1_len = len;
849 *src1_is_store = false;
850 }
851
852 if (dest != NULL_TREE)
853 {
854 dst->start = dest;
855 dst->access_size = access_size;
856 *dst_len = len;
857 *dst_is_store = true;
858 }
859
860 got_reference_p = true;
861 }
862 else if (dest)
863 {
864 dst->start = dest;
865 dst->access_size = access_size;
866 *dst_len = NULL_TREE;
867 *dst_is_store = is_store;
868 *dest_is_deref = true;
869 got_reference_p = true;
870 }
871
872 return got_reference_p;
873 }
874
875 /* Return true iff a given gimple statement has been instrumented.
876 Note that the statement is "defined" by the memory references it
877 contains. */
878
879 static bool
880 has_stmt_been_instrumented_p (gimple stmt)
881 {
882 if (gimple_assign_single_p (stmt))
883 {
884 bool r_is_store;
885 asan_mem_ref r;
886 asan_mem_ref_init (&r, NULL, 1);
887
888 if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
889 &r_is_store))
890 return has_mem_ref_been_instrumented (&r);
891 }
892 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
893 {
894 asan_mem_ref src0, src1, dest;
895 asan_mem_ref_init (&src0, NULL, 1);
896 asan_mem_ref_init (&src1, NULL, 1);
897 asan_mem_ref_init (&dest, NULL, 1);
898
899 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
900 bool src0_is_store = false, src1_is_store = false,
901 dest_is_store = false, dest_is_deref = false, intercepted_p = true;
902 if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
903 &src0, &src0_len, &src0_is_store,
904 &src1, &src1_len, &src1_is_store,
905 &dest, &dest_len, &dest_is_store,
906 &dest_is_deref, &intercepted_p))
907 {
908 if (src0.start != NULL_TREE
909 && !has_mem_ref_been_instrumented (&src0, src0_len))
910 return false;
911
912 if (src1.start != NULL_TREE
913 && !has_mem_ref_been_instrumented (&src1, src1_len))
914 return false;
915
916 if (dest.start != NULL_TREE
917 && !has_mem_ref_been_instrumented (&dest, dest_len))
918 return false;
919
920 return true;
921 }
922 }
923 return false;
924 }
925
926 /* Insert a memory reference into the hash table. */
927
928 static void
929 update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
930 {
931 hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
932
933 asan_mem_ref r;
934 asan_mem_ref_init (&r, ref, access_size);
935
936 asan_mem_ref **slot = ht->find_slot (&r, INSERT);
937 if (*slot == NULL || (*slot)->access_size < access_size)
938 *slot = asan_mem_ref_new (ref, access_size);
939 }
940
941 /* Initialize shadow_ptr_types array. */
942
943 static void
944 asan_init_shadow_ptr_types (void)
945 {
946 asan_shadow_set = new_alias_set ();
947 shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
948 TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
949 shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
950 shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
951 TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
952 shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
953 initialize_sanitizer_builtins ();
954 }
955
956 /* Create an ADDR_EXPR of a STRING_CST holding the text of pretty printer PP.  */
957
958 static tree
959 asan_pp_string (pretty_printer *pp)
960 {
961 const char *buf = pp_formatted_text (pp);
962 size_t len = strlen (buf);
963 tree ret = build_string (len + 1, buf);
964 TREE_TYPE (ret)
965 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
966 build_index_type (size_int (len)));
967 TREE_READONLY (ret) = 1;
968 TREE_STATIC (ret) = 1;
969 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
970 }
971
972 /* Return a CONST_INT representing 4 subsequent shadow memory bytes. */
973
974 static rtx
975 asan_shadow_cst (unsigned char shadow_bytes[4])
976 {
977 int i;
978 unsigned HOST_WIDE_INT val = 0;
979 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
980 for (i = 0; i < 4; i++)
981 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
982 << (BITS_PER_UNIT * i);
983 return gen_int_mode (val, SImode);
984 }
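
/* A worked example of the packing above, for a little-endian target:
   shadow_bytes { 0x00, 0x00, 0x04, 0xf2 } -- two fully addressable 8-byte
   granules, then a granule with only its first 4 bytes addressable, then a
   middle red zone byte -- becomes the SImode constant 0xf2040000.  */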
985
986 /* Clear shadow memory at SHADOW_MEM, LEN bytes.  We cannot emit a library
987    call here though.  */
988
989 static void
990 asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
991 {
992 rtx_insn *insn, *insns, *jump;
993 rtx_code_label *top_label;
994 rtx end, addr, tmp;
995
996 start_sequence ();
997 clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
998 insns = get_insns ();
999 end_sequence ();
1000 for (insn = insns; insn; insn = NEXT_INSN (insn))
1001 if (CALL_P (insn))
1002 break;
1003 if (insn == NULL_RTX)
1004 {
1005 emit_insn (insns);
1006 return;
1007 }
1008
1009 gcc_assert ((len & 3) == 0);
1010 top_label = gen_label_rtx ();
1011 addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
1012 shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
1013 end = force_reg (Pmode, plus_constant (Pmode, addr, len));
1014 emit_label (top_label);
1015
1016 emit_move_insn (shadow_mem, const0_rtx);
1017 tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
1018 true, OPTAB_LIB_WIDEN);
1019 if (tmp != addr)
1020 emit_move_insn (addr, tmp);
1021 emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
1022 jump = get_last_insn ();
1023 gcc_assert (JUMP_P (jump));
1024 add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
1025 }
1026
/* Emit the LASANPC label at the start of the function's text; its address
   is stored in the stack frame description, so the run-time library can
   report the PC of the function owning a bad stack frame.  */

1027 void
1028 asan_function_start (void)
1029 {
1030 section *fnsec = function_section (current_function_decl);
1031 switch_to_section (fnsec);
1032 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
1033 current_function_funcdef_no);
1034 }
1035
1036 /* Insert code to protect stack vars. The prologue sequence should be emitted
1037    directly, the epilogue sequence returned.  BASE is the register holding the
1038    stack base, against which the offsets in the OFFSETS array are relative; the
1039    OFFSETS array contains pairs of offsets in reverse order, always the end offset
1040    of some gap that needs protection followed by its starting offset,
1041    and DECLS is an array of representative decls for each var partition.
1042    LENGTH is the length of the OFFSETS array; the DECLS array is LENGTH / 2 - 1
1043    elements long (OFFSETS includes the gap before the first variable as well
1044    as gaps after each stack variable).  PBASE is, if non-NULL, some pseudo
1045    register which the stack vars' DECL_RTLs are based on.  Either BASE should be
1046    assigned to PBASE, when not doing use-after-return protection, or the
1047    corresponding address based on the __asan_stack_malloc* return value.
1048
1049 rtx_insn *
1050 asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
1051 HOST_WIDE_INT *offsets, tree *decls, int length)
1052 {
1053 rtx shadow_base, shadow_mem, ret, mem, orig_base;
1054 rtx_code_label *lab;
1055 rtx_insn *insns;
1056 char buf[30];
1057 unsigned char shadow_bytes[4];
1058 HOST_WIDE_INT base_offset = offsets[length - 1];
1059 HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
1060 HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
1061 HOST_WIDE_INT last_offset, last_size;
1062 int l;
1063 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
1064 tree str_cst, decl, id;
1065 int use_after_return_class = -1;
1066
1067 if (shadow_ptr_types[0] == NULL_TREE)
1068 asan_init_shadow_ptr_types ();
1069
1070 /* First of all, prepare the description string. */
1071 pretty_printer asan_pp;
1072
1073 pp_decimal_int (&asan_pp, length / 2 - 1);
1074 pp_space (&asan_pp);
1075 for (l = length - 2; l; l -= 2)
1076 {
1077 tree decl = decls[l / 2 - 1];
1078 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
1079 pp_space (&asan_pp);
1080 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
1081 pp_space (&asan_pp);
1082 if (DECL_P (decl) && DECL_NAME (decl))
1083 {
1084 pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
1085 pp_space (&asan_pp);
1086 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
1087 }
1088 else
1089 pp_string (&asan_pp, "9 <unknown>");
1090 pp_space (&asan_pp);
1091 }
1092 str_cst = asan_pp_string (&asan_pp);
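  /* For the foo () example in the file header comment, and with purely
     hypothetical frame offsets, the string built above would look like
     "2 32 24 1 a 96 8 1 b ": two variables, 'a' at offset 32 with size 24
     and a 1-character name, then 'b' at offset 96 with size 8.  */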
1093
1094 /* Emit the prologue sequence. */
1095 if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
1096 && ASAN_USE_AFTER_RETURN)
1097 {
1098 use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
1099 /* __asan_stack_malloc_N guarantees alignment
1100 N < 6 ? (64 << N) : 4096 bytes. */
1101 if (alignb > (use_after_return_class < 6
1102 ? (64U << use_after_return_class) : 4096U))
1103 use_after_return_class = -1;
1104 else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
1105 base_align_bias = ((asan_frame_size + alignb - 1)
1106 & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
1107 }
1108 /* Align base if target is STRICT_ALIGNMENT. */
1109 if (STRICT_ALIGNMENT)
1110 base = expand_binop (Pmode, and_optab, base,
1111 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
1112 << ASAN_SHADOW_SHIFT)
1113 / BITS_PER_UNIT), Pmode), NULL_RTX,
1114 1, OPTAB_DIRECT);
1115
1116 if (use_after_return_class == -1 && pbase)
1117 emit_move_insn (pbase, base);
1118
1119 base = expand_binop (Pmode, add_optab, base,
1120 gen_int_mode (base_offset - base_align_bias, Pmode),
1121 NULL_RTX, 1, OPTAB_DIRECT);
1122 orig_base = NULL_RTX;
1123 if (use_after_return_class != -1)
1124 {
1125 if (asan_detect_stack_use_after_return == NULL_TREE)
1126 {
1127 id = get_identifier ("__asan_option_detect_stack_use_after_return");
1128 decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1129 integer_type_node);
1130 SET_DECL_ASSEMBLER_NAME (decl, id);
1131 TREE_ADDRESSABLE (decl) = 1;
1132 DECL_ARTIFICIAL (decl) = 1;
1133 DECL_IGNORED_P (decl) = 1;
1134 DECL_EXTERNAL (decl) = 1;
1135 TREE_STATIC (decl) = 1;
1136 TREE_PUBLIC (decl) = 1;
1137 TREE_USED (decl) = 1;
1138 asan_detect_stack_use_after_return = decl;
1139 }
1140 orig_base = gen_reg_rtx (Pmode);
1141 emit_move_insn (orig_base, base);
1142 ret = expand_normal (asan_detect_stack_use_after_return);
1143 lab = gen_label_rtx ();
1144 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1145 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1146 VOIDmode, 0, lab, very_likely);
1147 snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1148 use_after_return_class);
1149 ret = init_one_libfunc (buf);
1150 rtx addr = convert_memory_address (ptr_mode, base);
1151 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
1152 GEN_INT (asan_frame_size
1153 + base_align_bias),
1154 TYPE_MODE (pointer_sized_int_node),
1155 addr, ptr_mode);
1156 ret = convert_memory_address (Pmode, ret);
1157 emit_move_insn (base, ret);
1158 emit_label (lab);
1159 emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1160 gen_int_mode (base_align_bias
1161 - base_offset, Pmode),
1162 NULL_RTX, 1, OPTAB_DIRECT));
1163 }
1164 mem = gen_rtx_MEM (ptr_mode, base);
1165 mem = adjust_address (mem, VOIDmode, base_align_bias);
1166 emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
1167 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1168 emit_move_insn (mem, expand_normal (str_cst));
1169 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1170 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1171 id = get_identifier (buf);
1172 decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1173 VAR_DECL, id, char_type_node);
1174 SET_DECL_ASSEMBLER_NAME (decl, id);
1175 TREE_ADDRESSABLE (decl) = 1;
1176 TREE_READONLY (decl) = 1;
1177 DECL_ARTIFICIAL (decl) = 1;
1178 DECL_IGNORED_P (decl) = 1;
1179 TREE_STATIC (decl) = 1;
1180 TREE_PUBLIC (decl) = 0;
1181 TREE_USED (decl) = 1;
1182 DECL_INITIAL (decl) = decl;
1183 TREE_ASM_WRITTEN (decl) = 1;
1184 TREE_ASM_WRITTEN (id) = 1;
1185 emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1186 shadow_base = expand_binop (Pmode, lshr_optab, base,
1187 GEN_INT (ASAN_SHADOW_SHIFT),
1188 NULL_RTX, 1, OPTAB_DIRECT);
1189 shadow_base
1190 = plus_constant (Pmode, shadow_base,
1191 asan_shadow_offset ()
1192 + (base_align_bias >> ASAN_SHADOW_SHIFT));
1193 gcc_assert (asan_shadow_set != -1
1194 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1195 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1196 set_mem_alias_set (shadow_mem, asan_shadow_set);
1197 if (STRICT_ALIGNMENT)
1198 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1199 prev_offset = base_offset;
1200 for (l = length; l; l -= 2)
1201 {
1202 if (l == 2)
1203 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1204 offset = offsets[l - 1];
1205 if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
1206 {
1207 int i;
1208 HOST_WIDE_INT aoff
1209 = base_offset + ((offset - base_offset)
1210 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1211 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1212 (aoff - prev_offset)
1213 >> ASAN_SHADOW_SHIFT);
1214 prev_offset = aoff;
1215 for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
1216 if (aoff < offset)
1217 {
1218 if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
1219 shadow_bytes[i] = 0;
1220 else
1221 shadow_bytes[i] = offset - aoff;
1222 }
1223 else
1224 shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
1225 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1226 offset = aoff;
1227 }
1228 while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
1229 {
1230 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1231 (offset - prev_offset)
1232 >> ASAN_SHADOW_SHIFT);
1233 prev_offset = offset;
1234 memset (shadow_bytes, cur_shadow_byte, 4);
1235 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1236 offset += ASAN_RED_ZONE_SIZE;
1237 }
1238 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1239 }
1240 do_pending_stack_adjust ();
1241
1242 /* Construct epilogue sequence. */
1243 start_sequence ();
1244
1245 lab = NULL;
1246 if (use_after_return_class != -1)
1247 {
1248 rtx_code_label *lab2 = gen_label_rtx ();
1249 char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
1250 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1251 emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
1252 VOIDmode, 0, lab2, very_likely);
1253 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1254 set_mem_alias_set (shadow_mem, asan_shadow_set);
1255 mem = gen_rtx_MEM (ptr_mode, base);
1256 mem = adjust_address (mem, VOIDmode, base_align_bias);
1257 emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
1258 unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
1259 if (use_after_return_class < 5
1260 && can_store_by_pieces (sz, builtin_memset_read_str, &c,
1261 BITS_PER_UNIT, true))
1262 store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
1263 BITS_PER_UNIT, true, 0);
1264 else if (use_after_return_class >= 5
1265 || !set_storage_via_setmem (shadow_mem,
1266 GEN_INT (sz),
1267 gen_int_mode (c, QImode),
1268 BITS_PER_UNIT, BITS_PER_UNIT,
1269 -1, sz, sz, sz))
1270 {
1271 snprintf (buf, sizeof buf, "__asan_stack_free_%d",
1272 use_after_return_class);
1273 ret = init_one_libfunc (buf);
1274 rtx addr = convert_memory_address (ptr_mode, base);
1275 rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
1276 emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
1277 GEN_INT (asan_frame_size + base_align_bias),
1278 TYPE_MODE (pointer_sized_int_node),
1279 orig_addr, ptr_mode);
1280 }
1281 lab = gen_label_rtx ();
1282 emit_jump (lab);
1283 emit_label (lab2);
1284 }
1285
1286 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1287 set_mem_alias_set (shadow_mem, asan_shadow_set);
1288
1289 if (STRICT_ALIGNMENT)
1290 set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1291
1292 prev_offset = base_offset;
1293 last_offset = base_offset;
1294 last_size = 0;
1295 for (l = length; l; l -= 2)
1296 {
1297 offset = base_offset + ((offsets[l - 1] - base_offset)
1298 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1299 if (last_offset + last_size != offset)
1300 {
1301 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1302 (last_offset - prev_offset)
1303 >> ASAN_SHADOW_SHIFT);
1304 prev_offset = last_offset;
1305 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1306 last_offset = offset;
1307 last_size = 0;
1308 }
1309 last_size += base_offset + ((offsets[l - 2] - base_offset)
1310 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1311 - offset;
1312 }
1313 if (last_size)
1314 {
1315 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1316 (last_offset - prev_offset)
1317 >> ASAN_SHADOW_SHIFT);
1318 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1319 }
1320
1321 do_pending_stack_adjust ();
1322 if (lab)
1323 emit_label (lab);
1324
1325 insns = get_insns ();
1326 end_sequence ();
1327 return insns;
1328 }
1329
1330 /* Return true if DECL, a global var, might be overridden and therefore
1331    needs a local alias.  */
1332
1333 static bool
1334 asan_needs_local_alias (tree decl)
1335 {
1336 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1337 }
1338
1339 /* Return true if DECL is a VAR_DECL that should be protected
1340 by Address Sanitizer, by appending a red zone with protected
1341 shadow memory after it and aligning it to at least
1342 ASAN_RED_ZONE_SIZE bytes. */
1343
1344 bool
1345 asan_protect_global (tree decl)
1346 {
1347 if (!ASAN_GLOBALS)
1348 return false;
1349
1350 rtx rtl, symbol;
1351
1352 if (TREE_CODE (decl) == STRING_CST)
1353 {
1354 /* Instrument all STRING_CSTs except those created
1355 by asan_pp_string here. */
1356 if (shadow_ptr_types[0] != NULL_TREE
1357 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1358 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1359 return false;
1360 return true;
1361 }
1362 if (TREE_CODE (decl) != VAR_DECL
1363 /* TLS vars aren't statically protectable. */
1364 || DECL_THREAD_LOCAL_P (decl)
1365 /* Externs will be protected elsewhere. */
1366 || DECL_EXTERNAL (decl)
1367 || !DECL_RTL_SET_P (decl)
1368 /* Comdat vars pose an ABI problem, we can't know if
1369 the var that is selected by the linker will have
1370 padding or not. */
1371 || DECL_ONE_ONLY (decl)
1372 /* Similarly for common vars. People can use -fno-common.
1373 Note: Linux kernel is built with -fno-common, so we do instrument
1374 globals there even if it is C. */
1375 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
1376       /* Don't protect if using a user section: vars placed into a
1377 	 user section from multiple TUs are often assumed to form an
1378 	 array of such vars, and putting padding in there breaks
1379 	 this assumption.  */
1380 || (DECL_SECTION_NAME (decl) != NULL
1381 && !symtab_node::get (decl)->implicit_section
1382 && !section_sanitized_p (DECL_SECTION_NAME (decl)))
1383 || DECL_SIZE (decl) == 0
1384 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1385 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1386 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
1387 || TREE_TYPE (decl) == ubsan_get_source_location_type ())
1388 return false;
1389
1390 rtl = DECL_RTL (decl);
1391 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1392 return false;
1393 symbol = XEXP (rtl, 0);
1394
1395 if (CONSTANT_POOL_ADDRESS_P (symbol)
1396 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1397 return false;
1398
1399 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1400 return false;
1401
1402 #ifndef ASM_OUTPUT_DEF
1403 if (asan_needs_local_alias (decl))
1404 return false;
1405 #endif
1406
1407 return true;
1408 }
1409
1410 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1411 IS_STORE is either 1 (for a store) or 0 (for a load). */
1412
1413 static tree
1414 report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1415 int *nargs)
1416 {
1417 static enum built_in_function report[2][2][6]
1418 = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1419 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1420 BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1421 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1422 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1423 BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
1424 { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
1425 BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
1426 BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
1427 BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
1428 BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
1429 BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
1430 { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
1431 BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
1432 BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
1433 BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
1434 BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
1435 BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
1436 if (size_in_bytes == -1)
1437 {
1438 *nargs = 2;
1439 return builtin_decl_implicit (report[recover_p][is_store][5]);
1440 }
1441 *nargs = 1;
1442 int size_log2 = exact_log2 (size_in_bytes);
1443 return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
1444 }
1445
1446 /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1447 IS_STORE is either 1 (for a store) or 0 (for a load). */
1448
1449 static tree
1450 check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1451 int *nargs)
1452 {
1453 static enum built_in_function check[2][2][6]
1454 = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
1455 BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
1456 BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
1457 { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
1458 BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
1459 BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
1460 { { BUILT_IN_ASAN_LOAD1_NOABORT,
1461 BUILT_IN_ASAN_LOAD2_NOABORT,
1462 BUILT_IN_ASAN_LOAD4_NOABORT,
1463 BUILT_IN_ASAN_LOAD8_NOABORT,
1464 BUILT_IN_ASAN_LOAD16_NOABORT,
1465 BUILT_IN_ASAN_LOADN_NOABORT },
1466 { BUILT_IN_ASAN_STORE1_NOABORT,
1467 BUILT_IN_ASAN_STORE2_NOABORT,
1468 BUILT_IN_ASAN_STORE4_NOABORT,
1469 BUILT_IN_ASAN_STORE8_NOABORT,
1470 BUILT_IN_ASAN_STORE16_NOABORT,
1471 BUILT_IN_ASAN_STOREN_NOABORT } } };
1472 if (size_in_bytes == -1)
1473 {
1474 *nargs = 2;
1475 return builtin_decl_implicit (check[recover_p][is_store][5]);
1476 }
1477 *nargs = 1;
1478 int size_log2 = exact_log2 (size_in_bytes);
1479 return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
1480 }
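
/* For instance, check_func (true, false, 4, &nargs) -- a 4-byte store with
   no recovery -- yields BUILT_IN_ASAN_STORE4 with *nargs == 1, while a
   variable-sized region (size_in_bytes == -1) yields BUILT_IN_ASAN_STOREN
   with *nargs == 2, taking the base address and the length as arguments.  */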
1481
1482 /* Split the current basic block and create a condition statement
1483 insertion point right before or after the statement pointed to by
1484 ITER. Return an iterator to the point at which the caller might
1485 safely insert the condition statement.
1486
1487 THEN_BLOCK must be set to the address of an uninitialized instance
1488 of basic_block. The function will then set *THEN_BLOCK to the
1489 'then block' of the condition statement to be inserted by the
1490 caller.
1491
1492 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1493 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1494
1495    Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1496 block' of the condition statement to be inserted by the caller.
1497
1498 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1499 statements starting from *ITER, and *THEN_BLOCK is a new empty
1500 block.
1501
1502    *ITER is adjusted to always point to the first statement
1503    of the basic block *FALLTHROUGH_BLOCK.  That statement is the
1504 same as what ITER was pointing to prior to calling this function,
1505 if BEFORE_P is true; otherwise, it is its following statement. */
1506
1507 gimple_stmt_iterator
1508 create_cond_insert_point (gimple_stmt_iterator *iter,
1509 bool before_p,
1510 bool then_more_likely_p,
1511 bool create_then_fallthru_edge,
1512 basic_block *then_block,
1513 basic_block *fallthrough_block)
1514 {
1515 gimple_stmt_iterator gsi = *iter;
1516
1517 if (!gsi_end_p (gsi) && before_p)
1518 gsi_prev (&gsi);
1519
1520 basic_block cur_bb = gsi_bb (*iter);
1521
1522 edge e = split_block (cur_bb, gsi_stmt (gsi));
1523
1524 /* Get a hold on the 'condition block', the 'then block' and the
1525 'else block'. */
1526 basic_block cond_bb = e->src;
1527 basic_block fallthru_bb = e->dest;
1528 basic_block then_bb = create_empty_bb (cond_bb);
1529 if (current_loops)
1530 {
1531 add_bb_to_loop (then_bb, cond_bb->loop_father);
1532 loops_state_set (LOOPS_NEED_FIXUP);
1533 }
1534
1535 /* Set up the newly created 'then block'. */
1536 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1537 int fallthrough_probability
1538 = then_more_likely_p
1539 ? PROB_VERY_UNLIKELY
1540 : PROB_ALWAYS - PROB_VERY_UNLIKELY;
1541 e->probability = PROB_ALWAYS - fallthrough_probability;
1542 if (create_then_fallthru_edge)
1543 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1544
1545 /* Set up the fallthrough basic block. */
1546 e = find_edge (cond_bb, fallthru_bb);
1547 e->flags = EDGE_FALSE_VALUE;
1548 e->count = cond_bb->count;
1549 e->probability = fallthrough_probability;
1550
1551 /* Update dominance info for the newly created then_bb; note that
1552 fallthru_bb's dominance info has already been updated by
1553      split_block.  */
1554 if (dom_info_available_p (CDI_DOMINATORS))
1555 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1556
1557 *then_block = then_bb;
1558 *fallthrough_block = fallthru_bb;
1559 *iter = gsi_start_bb (fallthru_bb);
1560
1561 return gsi_last_bb (cond_bb);
1562 }
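
/* To illustrate the CFG shape produced above (a sketch; the names are just
   labels for this comment):

     cond_bb  --(EDGE_TRUE_VALUE)-->   then_bb
     cond_bb  --(EDGE_FALSE_VALUE)-->  fallthru_bb
     then_bb  --(EDGE_FALLTHRU)-->     fallthru_bb   (only when
						      CREATE_THEN_FALLTHRU_EDGE)

   cond_bb ends where the caller inserts the condition statement, then_bb is
   the new empty block for the out-of-line code, and fallthru_bb starts with
   the statement *ITER pointed to.  */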
1563
1564 /* Insert an if condition followed by a 'then block' right before the
1565 statement pointed to by ITER. The fallthrough block -- which is the
1566 else block of the condition as well as the destination of the
1567    outgoing edge of the 'then block' -- starts with the statement
1568 pointed to by ITER.
1569
1570 COND is the condition of the if.
1571
1572 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1573 'then block' is higher than the probability of the edge to the
1574 fallthrough block.
1575
1576 Upon completion of the function, *THEN_BB is set to the newly
1577 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1578 fallthrough block.
1579
1580 *ITER is adjusted to still point to the same statement it was
1581 pointing to initially. */
1582
1583 static void
1584 insert_if_then_before_iter (gcond *cond,
1585 gimple_stmt_iterator *iter,
1586 bool then_more_likely_p,
1587 basic_block *then_bb,
1588 basic_block *fallthrough_bb)
1589 {
1590 gimple_stmt_iterator cond_insert_point =
1591 create_cond_insert_point (iter,
1592 /*before_p=*/true,
1593 then_more_likely_p,
1594 /*create_then_fallthru_edge=*/true,
1595 then_bb,
1596 fallthrough_bb);
1597 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1598 }
1599
1600 /* Build
1601 (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset (). */
1602
1603 static tree
1604 build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
1605 tree base_addr, tree shadow_ptr_type)
1606 {
1607 tree t, uintptr_type = TREE_TYPE (base_addr);
1608 tree shadow_type = TREE_TYPE (shadow_ptr_type);
1609 gimple g;
1610
1611 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
1612 g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
1613 base_addr, t);
1614 gimple_set_location (g, location);
1615 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1616
1617 t = build_int_cst (uintptr_type, asan_shadow_offset ());
1618 g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
1619 gimple_assign_lhs (g), t);
1620 gimple_set_location (g, location);
1621 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1622
1623 g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
1624 gimple_assign_lhs (g));
1625 gimple_set_location (g, location);
1626 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1627
1628 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1629 build_int_cst (shadow_ptr_type, 0));
1630 g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
1631 gimple_set_location (g, location);
1632 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1633 return gimple_assign_lhs (g);
1634 }
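
/* For example, the statements inserted above for a base address _1 and the
   1-byte shadow pointer type boil down to the following GIMPLE (the shadow
   offset and the SSA names are illustrative only):

     _2 = _1 >> 3;
     _3 = _2 + 0x7fff8000;
     _4 = (signed char *) _3;
     _5 = MEM[(signed char *)_4];

   and _5, the shadow value, is what the function returns.  */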
1635
1636 /* BASE can already be an SSA_NAME; in that case, do not create a
1637 new SSA_NAME for it. */
1638
1639 static tree
1640 maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
1641 bool before_p)
1642 {
1643 if (TREE_CODE (base) == SSA_NAME)
1644 return base;
1645 gimple g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
1646 TREE_CODE (base), base);
1647 gimple_set_location (g, loc);
1648 if (before_p)
1649 gsi_insert_before (iter, g, GSI_SAME_STMT);
1650 else
1651 gsi_insert_after (iter, g, GSI_NEW_STMT);
1652 return gimple_assign_lhs (g);
1653 }
1654
1655 /* LEN can already have necessary size and precision;
1656 in that case, do not create a new variable. */
1657
1658 tree
1659 maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
1660 bool before_p)
1661 {
1662 if (ptrofftype_p (len))
1663 return len;
1664 gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1665 NOP_EXPR, len);
1666 gimple_set_location (g, loc);
1667 if (before_p)
1668 gsi_insert_before (iter, g, GSI_SAME_STMT);
1669 else
1670 gsi_insert_after (iter, g, GSI_NEW_STMT);
1671 return gimple_assign_lhs (g);
1672 }
1673
1674 /* Instrument the memory access instruction BASE. Insert new
1675 statements before or after ITER.
1676
1677 Note that the memory access represented by BASE can be either an
1678 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1679 location. IS_STORE is TRUE for a store, FALSE for a load.
1680 BEFORE_P is TRUE for inserting the instrumentation code before
1681 ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
1682 for a scalar memory access and FALSE for memory region access.
1683 NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
1684    IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have a
1685    non-zero length.  ALIGN tells the alignment of the accessed memory object.
1686
1689 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1690 statement it was pointing to prior to calling this function,
1691 otherwise, it points to the statement logically following it. */
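/* A hedged illustration of the statement this emits (operand rendering
   is loose and the SSA name p_1 is made up): for a known 4-byte,
   4-byte-aligned store through p_1 a single internal call

     ASAN_CHECK (ASAN_CHECK_STORE | ASAN_CHECK_NON_ZERO_LEN
                 | ASAN_CHECK_SCALAR_ACCESS, p_1, 4, 4);

   is inserted; asan_expand_check_ifn later turns it into the real
   shadow-memory test or a run-time callback.  */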
1692
1693 static void
1694 build_check_stmt (location_t loc, tree base, tree len,
1695 HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
1696 bool is_non_zero_len, bool before_p, bool is_store,
1697 bool is_scalar_access, unsigned int align = 0)
1698 {
1699 gimple_stmt_iterator gsi = *iter;
1700 gimple g;
1701
1702 gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
1703
1704 gsi = *iter;
1705
1706 base = unshare_expr (base);
1707 base = maybe_create_ssa_name (loc, base, &gsi, before_p);
1708
1709 if (len)
1710 {
1711 len = unshare_expr (len);
1712 len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
1713 }
1714 else
1715 {
1716 gcc_assert (size_in_bytes != -1);
1717 len = build_int_cst (pointer_sized_int_node, size_in_bytes);
1718 }
1719
1720 if (size_in_bytes > 1)
1721 {
1722 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1723 || size_in_bytes > 16)
1724 is_scalar_access = false;
1725 else if (align && align < size_in_bytes * BITS_PER_UNIT)
1726 {
1727 /* On non-strict alignment targets, if a
1728 16-byte access is only 8-byte aligned,
1729 this will result in a misaligned 2-byte
1730 shadow memory load, but it can otherwise
1731 be handled using one read. */
1732 if (size_in_bytes != 16
1733 || STRICT_ALIGNMENT
1734 || align < 8 * BITS_PER_UNIT)
1735 is_scalar_access = false;
1736 }
1737 }
1738
1739 HOST_WIDE_INT flags = 0;
1740 if (is_store)
1741 flags |= ASAN_CHECK_STORE;
1742 if (is_non_zero_len)
1743 flags |= ASAN_CHECK_NON_ZERO_LEN;
1744 if (is_scalar_access)
1745 flags |= ASAN_CHECK_SCALAR_ACCESS;
1746
1747 g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
1748 build_int_cst (integer_type_node, flags),
1749 base, len,
1750 build_int_cst (integer_type_node,
1751 align / BITS_PER_UNIT));
1752 gimple_set_location (g, loc);
1753 if (before_p)
1754 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1755 else
1756 {
1757 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1758 gsi_next (&gsi);
1759 *iter = gsi;
1760 }
1761 }
1762
1763 /* If T represents a memory access, add instrumentation code before ITER.
1764 LOCATION is the source code location.
1765 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
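/* For example (illustration only): for the C statement "s.f = 1" the
   tree T is the COMPONENT_REF s.f with IS_STORE true, while for
   "x = *p" the tree T is the MEM_REF *p with IS_STORE false.  */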
1766
1767 static void
1768 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1769 location_t location, bool is_store)
1770 {
1771 if (is_store && !ASAN_INSTRUMENT_WRITES)
1772 return;
1773 if (!is_store && !ASAN_INSTRUMENT_READS)
1774 return;
1775
1776 tree type, base;
1777 HOST_WIDE_INT size_in_bytes;
1778
1779 type = TREE_TYPE (t);
1780 switch (TREE_CODE (t))
1781 {
1782 case ARRAY_REF:
1783 case COMPONENT_REF:
1784 case INDIRECT_REF:
1785 case MEM_REF:
1786 case VAR_DECL:
1787 case BIT_FIELD_REF:
1788 break;
1790 default:
1791 return;
1792 }
1793
1794 size_in_bytes = int_size_in_bytes (type);
1795 if (size_in_bytes <= 0)
1796 return;
1797
1798 HOST_WIDE_INT bitsize, bitpos;
1799 tree offset;
1800 machine_mode mode;
1801 int volatilep = 0, unsignedp = 0;
1802 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
1803 &mode, &unsignedp, &volatilep, false);
1804
1805 if (TREE_CODE (t) == COMPONENT_REF
1806 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1807 {
1808 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1809 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1810 TREE_OPERAND (t, 0), repr,
1811 NULL_TREE), location, is_store);
1812 return;
1813 }
1814
1815 if (bitpos % BITS_PER_UNIT
1816 || bitsize != size_in_bytes * BITS_PER_UNIT)
1817 return;
1818
1819 if (TREE_CODE (inner) == VAR_DECL
1820 && offset == NULL_TREE
1821 && bitpos >= 0
1822 && DECL_SIZE (inner)
1823 && tree_fits_shwi_p (DECL_SIZE (inner))
1824 && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
1825 {
1826 if (DECL_THREAD_LOCAL_P (inner))
1827 return;
1828 if (!ASAN_GLOBALS && is_global_var (inner))
1829 return;
1830 if (!TREE_STATIC (inner))
1831 {
1832 /* Automatic vars in the current function will always be
1833 accessible. */
1834 if (decl_function_context (inner) == current_function_decl)
1835 return;
1836 }
1837 /* Always instrument external vars, as they might be dynamically
1838 initialized. */
1839 else if (!DECL_EXTERNAL (inner))
1840 {
1841 /* For static vars, if they are known not to be dynamically
1842 initialized, they will always be accessible. */
1843 varpool_node *vnode = varpool_node::get (inner);
1844 if (vnode && !vnode->dynamically_initialized)
1845 return;
1846 }
1847 }
1848
1849 base = build_fold_addr_expr (t);
1850 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1851 {
1852 unsigned int align = get_object_alignment (t);
1853 build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
1854 /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
1855 is_store, /*is_scalar_access*/true, align);
1856 update_mem_ref_hash_table (base, size_in_bytes);
1857 update_mem_ref_hash_table (t, size_in_bytes);
1858 }
1859
1860 }
1861
1862 /* Insert a memory reference into the hash table if the access length
1863 can be determined at compile time. */
1864
1865 static void
1866 maybe_update_mem_ref_hash_table (tree base, tree len)
1867 {
1868 if (!POINTER_TYPE_P (TREE_TYPE (base))
1869 || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
1870 return;
1871
1872 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1873
1874 if (size_in_bytes != -1)
1875 update_mem_ref_hash_table (base, size_in_bytes);
1876 }
1877
1878 /* Instrument an access to a contiguous memory region that starts at
1879 the address pointed to by BASE, over a length of LEN (expressed in
1880 units of sizeof (*BASE)). ITER points to the instruction before
1881 which the instrumentation instructions must be inserted. LOCATION
1882 is the source location that the instrumentation instructions must
1883 have. If IS_STORE is true, then the memory access is a store;
1884 otherwise, it's a load. */
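/* For example (illustration only): given BASE == p and LEN == n, the
   region [p, p + n) is covered by a single IFN_ASAN_CHECK; when N is
   not a compile-time constant, the later expansion verifies the first
   and the last byte of the region.  */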
1885
1886 static void
1887 instrument_mem_region_access (tree base, tree len,
1888 gimple_stmt_iterator *iter,
1889 location_t location, bool is_store)
1890 {
1891 if (!POINTER_TYPE_P (TREE_TYPE (base))
1892 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1893 || integer_zerop (len))
1894 return;
1895
1896 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1897
1898 if ((size_in_bytes == -1)
1899 || !has_mem_ref_been_instrumented (base, size_in_bytes))
1900 {
1901 build_check_stmt (location, base, len, size_in_bytes, iter,
1902 /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
1903 is_store, /*is_scalar_access*/false, /*align*/0);
1904 }
1905
1906 maybe_update_mem_ref_hash_table (base, len);
1907 *iter = gsi_for_stmt (gsi_stmt (*iter));
1908 }
1909
1910 /* Instrument the call to a built-in memory access function that is
1911 pointed to by the iterator ITER.
1912
1913 Upon completion, return TRUE iff *ITER has been advanced to the
1914 statement following the one it was originally pointing to. */
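/* For example (illustration only): a call to a string/memory builtin
   is decomposed by get_mem_refs_of_builtin_call into the regions it
   reads and writes; if the run-time library does not already intercept
   that builtin, each region is passed to instrument_mem_region_access,
   otherwise only the mem-ref hash table is updated so that following
   accesses to the same location need not be instrumented again.  */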
1915
1916 static bool
1917 instrument_builtin_call (gimple_stmt_iterator *iter)
1918 {
1919 if (!ASAN_MEMINTRIN)
1920 return false;
1921
1922 bool iter_advanced_p = false;
1923 gcall *call = as_a <gcall *> (gsi_stmt (*iter));
1924
1925 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
1926
1927 location_t loc = gimple_location (call);
1928
1929 asan_mem_ref src0, src1, dest;
1930 asan_mem_ref_init (&src0, NULL, 1);
1931 asan_mem_ref_init (&src1, NULL, 1);
1932 asan_mem_ref_init (&dest, NULL, 1);
1933
1934 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1935 bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
1936 dest_is_deref = false, intercepted_p = true;
1937
1938 if (get_mem_refs_of_builtin_call (call,
1939 &src0, &src0_len, &src0_is_store,
1940 &src1, &src1_len, &src1_is_store,
1941 &dest, &dest_len, &dest_is_store,
1942 &dest_is_deref, &intercepted_p))
1943 {
1944 if (dest_is_deref)
1945 {
1946 instrument_derefs (iter, dest.start, loc, dest_is_store);
1947 gsi_next (iter);
1948 iter_advanced_p = true;
1949 }
1950 else if (!intercepted_p
1951 && (src0_len || src1_len || dest_len))
1952 {
1953 if (src0.start != NULL_TREE)
1954 instrument_mem_region_access (src0.start, src0_len,
1955 iter, loc, /*is_store=*/false);
1956 if (src1.start != NULL_TREE)
1957 instrument_mem_region_access (src1.start, src1_len,
1958 iter, loc, /*is_store=*/false);
1959 if (dest.start != NULL_TREE)
1960 instrument_mem_region_access (dest.start, dest_len,
1961 iter, loc, /*is_store=*/true);
1962
1963 *iter = gsi_for_stmt (call);
1964 gsi_next (iter);
1965 iter_advanced_p = true;
1966 }
1967 else
1968 {
1969 if (src0.start != NULL_TREE)
1970 maybe_update_mem_ref_hash_table (src0.start, src0_len);
1971 if (src1.start != NULL_TREE)
1972 maybe_update_mem_ref_hash_table (src1.start, src1_len);
1973 if (dest.start != NULL_TREE)
1974 maybe_update_mem_ref_hash_table (dest.start, dest_len);
1975 }
1976 }
1977 return iter_advanced_p;
1978 }
1979
1980 /* Instrument the assignment statement ITER if it is subject to
1981 instrumentation. Return TRUE iff instrumentation actually
1982 happened. In that case, the iterator ITER is advanced to the next
1983 statement following the one initially pointed to by ITER,
1984 and the relevant memory reference whose access has been
1985 instrumented is added to the memory references hash table. */
1986
1987 static bool
1988 maybe_instrument_assignment (gimple_stmt_iterator *iter)
1989 {
1990 gimple s = gsi_stmt (*iter);
1991
1992 gcc_assert (gimple_assign_single_p (s));
1993
1994 tree ref_expr = NULL_TREE;
1995 bool is_store, is_instrumented = false;
1996
1997 if (gimple_store_p (s))
1998 {
1999 ref_expr = gimple_assign_lhs (s);
2000 is_store = true;
2001 instrument_derefs (iter, ref_expr,
2002 gimple_location (s),
2003 is_store);
2004 is_instrumented = true;
2005 }
2006
2007 if (gimple_assign_load_p (s))
2008 {
2009 ref_expr = gimple_assign_rhs1 (s);
2010 is_store = false;
2011 instrument_derefs (iter, ref_expr,
2012 gimple_location (s),
2013 is_store);
2014 is_instrumented = true;
2015 }
2016
2017 if (is_instrumented)
2018 gsi_next (iter);
2019
2020 return is_instrumented;
2021 }
2022
2023 /* Instrument the function call pointed to by the iterator ITER, if it
2024 is subject to instrumentation. At the moment, the only function
2025 calls that are instrumented are some built-in functions that access
2026 memory. Look at instrument_builtin_call to learn more.
2027
2028 Upon completion return TRUE iff *ITER was advanced to the statement
2029 following the one it was originally pointing to. */
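/* Besides the builtins handled above, calls that cannot return (other
   than __builtin_unreachable and __builtin_trap) get a call to
   __asan_handle_no_return inserted before them; the run-time library
   uses that hook to unpoison the stack, since the frames below such a
   call are never popped normally (a hedged summary of the code below).  */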
2030
2031 static bool
2032 maybe_instrument_call (gimple_stmt_iterator *iter)
2033 {
2034 gimple stmt = gsi_stmt (*iter);
2035 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2036
2037 if (is_builtin && instrument_builtin_call (iter))
2038 return true;
2039
2040 if (gimple_call_noreturn_p (stmt))
2041 {
2042 if (is_builtin)
2043 {
2044 tree callee = gimple_call_fndecl (stmt);
2045 switch (DECL_FUNCTION_CODE (callee))
2046 {
2047 case BUILT_IN_UNREACHABLE:
2048 case BUILT_IN_TRAP:
2049 /* Don't instrument these. */
2050 return false;
2051 default:
2052 break;
2053 }
2054 }
2055 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2056 gimple g = gimple_build_call (decl, 0);
2057 gimple_set_location (g, gimple_location (stmt));
2058 gsi_insert_before (iter, g, GSI_SAME_STMT);
2059 }
2060 return false;
2061 }
2062
2063 /* Walk each instruction in all basic blocks and instrument those that
2064 represent memory references: loads, stores, or function calls.
2065 In a given basic block, this function avoids instrumenting memory
2066 references that have already been instrumented. */
2067
2068 static void
2069 transform_statements (void)
2070 {
2071 basic_block bb, last_bb = NULL;
2072 gimple_stmt_iterator i;
2073 int saved_last_basic_block = last_basic_block_for_fn (cfun);
2074
2075 FOR_EACH_BB_FN (bb, cfun)
2076 {
2077 basic_block prev_bb = bb;
2078
2079 if (bb->index >= saved_last_basic_block) continue;
2080
2081 /* Flush the mem ref hash table, if current bb doesn't have
2082 exactly one predecessor, or if that predecessor (skipping
2083 over asan created basic blocks) isn't the last processed
2084 basic block. Thus we effectively flush on extended basic
2085 block boundaries. */
2086 while (single_pred_p (prev_bb))
2087 {
2088 prev_bb = single_pred (prev_bb);
2089 if (prev_bb->index < saved_last_basic_block)
2090 break;
2091 }
2092 if (prev_bb != last_bb)
2093 empty_mem_ref_hash_table ();
2094 last_bb = bb;
2095
2096 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2097 {
2098 gimple s = gsi_stmt (i);
2099
2100 if (has_stmt_been_instrumented_p (s))
2101 gsi_next (&i);
2102 else if (gimple_assign_single_p (s)
2103 && !gimple_clobber_p (s)
2104 && maybe_instrument_assignment (&i))
2105 /* Nothing to do as maybe_instrument_assignment advanced
2106 the iterator I. */;
2107 else if (is_gimple_call (s) && maybe_instrument_call (&i))
2108 /* Nothing to do as maybe_instrument_call
2109 advanced the iterator I. */;
2110 else
2111 {
2112 /* No instrumentation happened.
2113
2114 If the current instruction is a function call that
2115 might free something, let's forget about the memory
2116 references that got instrumented. Otherwise we might
2117 miss some instrumentation opportunities. */
2118 if (is_gimple_call (s) && !nonfreeing_call_p (s))
2119 empty_mem_ref_hash_table ();
2120
2121 gsi_next (&i);
2122 }
2123 }
2124 }
2125 free_mem_ref_resources ();
2126 }
2127
2128 /* Build
2129 __asan_before_dynamic_init (module_name)
2130 or
2131 __asan_after_dynamic_init ()
2132 call. */
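/* For instance (illustration only, file name made up): for a module
   compiled from "foo.C" its dynamic initializers end up bracketed as

     __asan_before_dynamic_init ("foo.C");
     ... run the dynamic initializers ...
     __asan_after_dynamic_init ();

   which lets the run-time library flag init-order bugs, i.e. reads of
   globals from another module before that module has been initialized.  */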
2133
2134 tree
2135 asan_dynamic_init_call (bool after_p)
2136 {
2137 tree fn = builtin_decl_implicit (after_p
2138 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2139 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2140 tree module_name_cst = NULL_TREE;
2141 if (!after_p)
2142 {
2143 pretty_printer module_name_pp;
2144 pp_string (&module_name_pp, main_input_filename);
2145
2146 if (shadow_ptr_types[0] == NULL_TREE)
2147 asan_init_shadow_ptr_types ();
2148 module_name_cst = asan_pp_string (&module_name_pp);
2149 module_name_cst = fold_convert (const_ptr_type_node,
2150 module_name_cst);
2151 }
2152
2153 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2154 }
2155
2156 /* Build
2157 struct __asan_global
2158 {
2159 const void *__beg;
2160 uptr __size;
2161 uptr __size_with_redzone;
2162 const void *__name;
2163 const void *__module_name;
2164 uptr __has_dynamic_init;
2165 __asan_global_source_location *__location;
2166 } type. */
2167
2168 static tree
2169 asan_global_struct (void)
2170 {
2171 static const char *field_names[7]
2172 = { "__beg", "__size", "__size_with_redzone",
2173 "__name", "__module_name", "__has_dynamic_init", "__location"};
2174 tree fields[7], ret;
2175 int i;
2176
2177 ret = make_node (RECORD_TYPE);
2178 for (i = 0; i < 7; i++)
2179 {
2180 fields[i]
2181 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2182 get_identifier (field_names[i]),
2183 (i == 0 || i == 3) ? const_ptr_type_node
2184 : pointer_sized_int_node);
2185 DECL_CONTEXT (fields[i]) = ret;
2186 if (i)
2187 DECL_CHAIN (fields[i - 1]) = fields[i];
2188 }
2189 tree type_decl = build_decl (input_location, TYPE_DECL,
2190 get_identifier ("__asan_global"), ret);
2191 DECL_IGNORED_P (type_decl) = 1;
2192 DECL_ARTIFICIAL (type_decl) = 1;
2193 TYPE_FIELDS (ret) = fields[0];
2194 TYPE_NAME (ret) = type_decl;
2195 TYPE_STUB_DECL (ret) = type_decl;
2196 layout_type (ret);
2197 return ret;
2198 }
2199
2200 /* Append description of a single global DECL into vector V.
2201 TYPE is the __asan_global struct type as returned by asan_global_struct. */
2202
2203 static void
2204 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2205 {
2206 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2207 unsigned HOST_WIDE_INT size;
2208 tree str_cst, module_name_cst, refdecl = decl;
2209 vec<constructor_elt, va_gc> *vinner = NULL;
2210
2211 pretty_printer asan_pp, module_name_pp;
2212
2213 if (DECL_NAME (decl))
2214 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2215 else
2216 pp_string (&asan_pp, "<unknown>");
2217 str_cst = asan_pp_string (&asan_pp);
2218
2219 pp_string (&module_name_pp, main_input_filename);
2220 module_name_cst = asan_pp_string (&module_name_pp);
2221
2222 if (asan_needs_local_alias (decl))
2223 {
2224 char buf[20];
2225 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2226 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2227 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2228 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2229 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2230 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2231 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2232 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2233 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2234 TREE_STATIC (refdecl) = 1;
2235 TREE_PUBLIC (refdecl) = 0;
2236 TREE_USED (refdecl) = 1;
2237 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2238 }
2239
2240 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2241 fold_convert (const_ptr_type_node,
2242 build_fold_addr_expr (refdecl)));
2243 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2244 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2245 size += asan_red_zone_size (size);
2246 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2247 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2248 fold_convert (const_ptr_type_node, str_cst));
2249 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2250 fold_convert (const_ptr_type_node, module_name_cst));
2251 varpool_node *vnode = varpool_node::get (decl);
2252 int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2253 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2254 build_int_cst (uptr, has_dynamic_init));
2255 tree locptr = NULL_TREE;
2256 location_t loc = DECL_SOURCE_LOCATION (decl);
2257 expanded_location xloc = expand_location (loc);
2258 if (xloc.file != NULL)
2259 {
2260 static int lasanloccnt = 0;
2261 char buf[25];
2262 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2263 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2264 ubsan_get_source_location_type ());
2265 TREE_STATIC (var) = 1;
2266 TREE_PUBLIC (var) = 0;
2267 DECL_ARTIFICIAL (var) = 1;
2268 DECL_IGNORED_P (var) = 1;
2269 pretty_printer filename_pp;
2270 pp_string (&filename_pp, xloc.file);
2271 tree str = asan_pp_string (&filename_pp);
2272 tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2273 NULL_TREE, str, NULL_TREE,
2274 build_int_cst (unsigned_type_node,
2275 xloc.line), NULL_TREE,
2276 build_int_cst (unsigned_type_node,
2277 xloc.column));
2278 TREE_CONSTANT (ctor) = 1;
2279 TREE_STATIC (ctor) = 1;
2280 DECL_INITIAL (var) = ctor;
2281 varpool_node::finalize_decl (var);
2282 locptr = fold_convert (uptr, build_fold_addr_expr (var));
2283 }
2284 else
2285 locptr = build_int_cst (uptr, 0);
2286 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2287 init = build_constructor (type, vinner);
2288 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2289 }
2290
2291 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
2292 void
2293 initialize_sanitizer_builtins (void)
2294 {
2295 tree decl;
2296
2297 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2298 return;
2299
2300 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2301 tree BT_FN_VOID_PTR
2302 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2303 tree BT_FN_VOID_CONST_PTR
2304 = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2305 tree BT_FN_VOID_PTR_PTR
2306 = build_function_type_list (void_type_node, ptr_type_node,
2307 ptr_type_node, NULL_TREE);
2308 tree BT_FN_VOID_PTR_PTR_PTR
2309 = build_function_type_list (void_type_node, ptr_type_node,
2310 ptr_type_node, ptr_type_node, NULL_TREE);
2311 tree BT_FN_VOID_PTR_PTRMODE
2312 = build_function_type_list (void_type_node, ptr_type_node,
2313 pointer_sized_int_node, NULL_TREE);
2314 tree BT_FN_VOID_INT
2315 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2316 tree BT_FN_SIZE_CONST_PTR_INT
2317 = build_function_type_list (size_type_node, const_ptr_type_node,
2318 integer_type_node, NULL_TREE);
2319 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2320 tree BT_FN_IX_CONST_VPTR_INT[5];
2321 tree BT_FN_IX_VPTR_IX_INT[5];
2322 tree BT_FN_VOID_VPTR_IX_INT[5];
2323 tree vptr
2324 = build_pointer_type (build_qualified_type (void_type_node,
2325 TYPE_QUAL_VOLATILE));
2326 tree cvptr
2327 = build_pointer_type (build_qualified_type (void_type_node,
2328 TYPE_QUAL_VOLATILE
2329 |TYPE_QUAL_CONST));
2330 tree boolt
2331 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2332 int i;
2333 for (i = 0; i < 5; i++)
2334 {
2335 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2336 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2337 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2338 integer_type_node, integer_type_node,
2339 NULL_TREE);
2340 BT_FN_IX_CONST_VPTR_INT[i]
2341 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2342 BT_FN_IX_VPTR_IX_INT[i]
2343 = build_function_type_list (ix, vptr, ix, integer_type_node,
2344 NULL_TREE);
2345 BT_FN_VOID_VPTR_IX_INT[i]
2346 = build_function_type_list (void_type_node, vptr, ix,
2347 integer_type_node, NULL_TREE);
2348 }
2349 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2350 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2351 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2352 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2353 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2354 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2355 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2356 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2357 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2358 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2359 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2360 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2361 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2362 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2363 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2364 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2365 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2366 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2367 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2368 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2369 #undef ATTR_NOTHROW_LEAF_LIST
2370 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2371 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2372 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2373 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2374 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2375 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2376 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2377 ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
2378 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2379 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2380 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2381 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2382 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2383 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2384 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2385 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2386 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2387 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2388 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2389 /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2390 #undef ATTR_PURE_NOTHROW_LEAF_LIST
2391 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
2392 #undef DEF_SANITIZER_BUILTIN
2393 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2394 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2395 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2396 set_call_expr_flags (decl, ATTRS); \
2397 set_builtin_decl (ENUM, decl, true);
2398
2399 #include "sanitizer.def"
2400
2401 /* -fsanitize=object-size uses __builtin_object_size, but that might
2402 not be available for e.g. Fortran at this point. We use
2403 DEF_SANITIZER_BUILTIN here only as a convenience macro. */
2404 if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
2405 && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
2406 DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
2407 BT_FN_SIZE_CONST_PTR_INT,
2408 ATTR_PURE_NOTHROW_LEAF_LIST)
2409
2410 #undef DEF_SANITIZER_BUILTIN
2411 }
2412
2413 /* Called via hash_table::traverse. Count the number of emitted
2414 STRING_CSTs in the constant hash table. */
2415
2416 int
2417 count_string_csts (constant_descriptor_tree **slot,
2418 unsigned HOST_WIDE_INT *data)
2419 {
2420 struct constant_descriptor_tree *desc = *slot;
2421 if (TREE_CODE (desc->value) == STRING_CST
2422 && TREE_ASM_WRITTEN (desc->value)
2423 && asan_protect_global (desc->value))
2424 ++*data;
2425 return 1;
2426 }
2427
2428 /* Helper structure to pass two parameters to
2429 add_string_csts. */
2430
2431 struct asan_add_string_csts_data
2432 {
2433 tree type;
2434 vec<constructor_elt, va_gc> *v;
2435 };
2436
2437 /* Called via hash_table::traverse. Call asan_add_global
2438 on emitted STRING_CSTs from the constant hash table. */
2439
2440 int
2441 add_string_csts (constant_descriptor_tree **slot,
2442 asan_add_string_csts_data *aascd)
2443 {
2444 struct constant_descriptor_tree *desc = *slot;
2445 if (TREE_CODE (desc->value) == STRING_CST
2446 && TREE_ASM_WRITTEN (desc->value)
2447 && asan_protect_global (desc->value))
2448 {
2449 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2450 aascd->type, aascd->v);
2451 }
2452 return 1;
2453 }
2454
2455 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2456 invoke ggc_collect. */
2457 static GTY(()) tree asan_ctor_statements;
2458
2459 /* Module-level instrumentation.
2460 - Insert __asan_init_vN() into the list of CTORs.
2461 - TODO: insert redzones around globals.
2462 */
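/* A hedged summary of the code below: emit a static constructor that
   calls __asan_init (for user-space ASan) and, when protected globals
   exist, registers an array of __asan_global descriptors via
   __asan_register_globals, plus a matching static destructor that
   calls __asan_unregister_globals.  */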
2463
2464 void
2465 asan_finish_file (void)
2466 {
2467 varpool_node *vnode;
2468 unsigned HOST_WIDE_INT gcount = 0;
2469
2470 if (shadow_ptr_types[0] == NULL_TREE)
2471 asan_init_shadow_ptr_types ();
2472 /* Avoid instrumenting code in the asan ctors/dtors.
2473 We don't need to insert padding after the description strings,
2474 nor after the .LASAN* array. */
2475 flag_sanitize &= ~SANITIZE_ADDRESS;
2476
2477 /* For user-space we want the asan constructors to run first.
2478 The Linux kernel does not support priorities other than the default,
2479 and its only other user of constructors is coverage, so there we run
2480 with the default priority. */
2481 int priority = flag_sanitize & SANITIZE_USER_ADDRESS
2482 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
2483
2484 if (flag_sanitize & SANITIZE_USER_ADDRESS)
2485 {
2486 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2487 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2488 }
2489 FOR_EACH_DEFINED_VARIABLE (vnode)
2490 if (TREE_ASM_WRITTEN (vnode->decl)
2491 && asan_protect_global (vnode->decl))
2492 ++gcount;
2493 hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
2494 const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
2495 (&gcount);
2496 if (gcount)
2497 {
2498 tree type = asan_global_struct (), var, ctor;
2499 tree dtor_statements = NULL_TREE;
2500 vec<constructor_elt, va_gc> *v;
2501 char buf[20];
2502
2503 type = build_array_type_nelts (type, gcount);
2504 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2505 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2506 type);
2507 TREE_STATIC (var) = 1;
2508 TREE_PUBLIC (var) = 0;
2509 DECL_ARTIFICIAL (var) = 1;
2510 DECL_IGNORED_P (var) = 1;
2511 vec_alloc (v, gcount);
2512 FOR_EACH_DEFINED_VARIABLE (vnode)
2513 if (TREE_ASM_WRITTEN (vnode->decl)
2514 && asan_protect_global (vnode->decl))
2515 asan_add_global (vnode->decl, TREE_TYPE (type), v);
2516 struct asan_add_string_csts_data aascd;
2517 aascd.type = TREE_TYPE (type);
2518 aascd.v = v;
2519 const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
2520 (&aascd);
2521 ctor = build_constructor (type, v);
2522 TREE_CONSTANT (ctor) = 1;
2523 TREE_STATIC (ctor) = 1;
2524 DECL_INITIAL (var) = ctor;
2525 varpool_node::finalize_decl (var);
2526
2527 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2528 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
2529 append_to_statement_list (build_call_expr (fn, 2,
2530 build_fold_addr_expr (var),
2531 gcount_tree),
2532 &asan_ctor_statements);
2533
2534 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2535 append_to_statement_list (build_call_expr (fn, 2,
2536 build_fold_addr_expr (var),
2537 gcount_tree),
2538 &dtor_statements);
2539 cgraph_build_static_cdtor ('D', dtor_statements, priority);
2540 }
2541 if (asan_ctor_statements)
2542 cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
2543 flag_sanitize |= SANITIZE_ADDRESS;
2544 }
2545
2546 /* Expand the IFN_ASAN_CHECK internal call. */
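/* A hedged sketch (names illustrative): with USE_CALLS a known 4-byte
   store is replaced by a single run-time call,

     __asan_store4 ((uintptr_t) base);

   otherwise the shadow byte for BASE is loaded and tested inline, and
   only the failing path calls a reporting routine such as
   __asan_report_store4; see the checks constructed below.  */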
2547
2548 bool
2549 asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
2550 {
2551 gimple g = gsi_stmt (*iter);
2552 location_t loc = gimple_location (g);
2553
2554 bool recover_p
2555 = (flag_sanitize & flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
2556
2557 HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
2558 gcc_assert (flags < ASAN_CHECK_LAST);
2559 bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
2560 bool is_store = (flags & ASAN_CHECK_STORE) != 0;
2561 bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
2562
2563 tree base = gimple_call_arg (g, 1);
2564 tree len = gimple_call_arg (g, 2);
2565 HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
2566
2567 HOST_WIDE_INT size_in_bytes
2568 = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2569
2570 if (use_calls)
2571 {
2572 /* Instrument using callbacks. */
2573 gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2574 NOP_EXPR, base);
2575 gimple_set_location (g, loc);
2576 gsi_insert_before (iter, g, GSI_SAME_STMT);
2577 tree base_addr = gimple_assign_lhs (g);
2578
2579 int nargs;
2580 tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
2581 if (nargs == 1)
2582 g = gimple_build_call (fun, 1, base_addr);
2583 else
2584 {
2585 gcc_assert (nargs == 2);
2586 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2587 NOP_EXPR, len);
2588 gimple_set_location (g, loc);
2589 gsi_insert_before (iter, g, GSI_SAME_STMT);
2590 tree sz_arg = gimple_assign_lhs (g);
2591 g = gimple_build_call (fun, nargs, base_addr, sz_arg);
2592 }
2593 gimple_set_location (g, loc);
2594 gsi_replace (iter, g, false);
2595 return false;
2596 }
2597
2598 HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
2599
2600 tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
2601 tree shadow_type = TREE_TYPE (shadow_ptr_type);
2602
2603 gimple_stmt_iterator gsi = *iter;
2604
2605 if (!is_non_zero_len)
2606 {
2607 /* The length of the memory area to asan-protect is not known to be
2608 non-zero at compile time. Guard the generated instrumentation code
2609 like:
2610
2611 if (len != 0)
2612 {
2613 //asan instrumentation code goes here.
2614 }
2615 // fallthrough instructions, starting with *ITER. */
2616
2617 g = gimple_build_cond (NE_EXPR,
2618 len,
2619 build_int_cst (TREE_TYPE (len), 0),
2620 NULL_TREE, NULL_TREE);
2621 gimple_set_location (g, loc);
2622
2623 basic_block then_bb, fallthrough_bb;
2624 insert_if_then_before_iter (as_a <gcond *> (g), iter,
2625 /*then_more_likely_p=*/true,
2626 &then_bb, &fallthrough_bb);
2627 /* Note that fallthrough_bb starts with the statement that was
2628 pointed to by ITER. */
2629
2630 /* The 'then block' of the 'if (len != 0)' condition is where
2631 we'll generate the asan instrumentation code now. */
2632 gsi = gsi_last_bb (then_bb);
2633 }
2634
2635 /* Get an iterator on the point where we can add the condition
2636 statement for the instrumentation. */
2637 basic_block then_bb, else_bb;
2638 gsi = create_cond_insert_point (&gsi, /*before_p*/false,
2639 /*then_more_likely_p=*/false,
2640 /*create_then_fallthru_edge*/recover_p,
2641 &then_bb,
2642 &else_bb);
2643
2644 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2645 NOP_EXPR, base);
2646 gimple_set_location (g, loc);
2647 gsi_insert_before (&gsi, g, GSI_NEW_STMT);
2648 tree base_addr = gimple_assign_lhs (g);
2649
2650 tree t = NULL_TREE;
2651 if (real_size_in_bytes >= 8)
2652 {
2653 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2654 shadow_ptr_type);
2655 t = shadow;
2656 }
2657 else
2658 {
2659 /* Slow path for 1, 2 and 4 byte accesses. */
2660 /* Test (shadow != 0)
2661 & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow). */
2662 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2663 shadow_ptr_type);
2664 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2665 gimple_seq seq = NULL;
2666 gimple_seq_add_stmt (&seq, shadow_test);
2667 /* An access aligned to >= 8 bytes can test just
2668 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
2669 to be 0. */
2670 if (align < 8)
2671 {
2672 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2673 base_addr, 7));
2674 gimple_seq_add_stmt (&seq,
2675 build_type_cast (shadow_type,
2676 gimple_seq_last (seq)));
2677 if (real_size_in_bytes > 1)
2678 gimple_seq_add_stmt (&seq,
2679 build_assign (PLUS_EXPR,
2680 gimple_seq_last (seq),
2681 real_size_in_bytes - 1));
2682 t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
2683 }
2684 else
2685 t = build_int_cst (shadow_type, real_size_in_bytes - 1);
2686 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
2687 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2688 gimple_seq_last (seq)));
2689 t = gimple_assign_lhs (gimple_seq_last (seq));
2690 gimple_seq_set_location (seq, loc);
2691 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2692
2693 /* For non-constant, misaligned or otherwise weird access sizes,
2694 check first and last byte. */
2695 if (size_in_bytes == -1)
2696 {
2697 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2698 MINUS_EXPR, len,
2699 build_int_cst (pointer_sized_int_node, 1));
2700 gimple_set_location (g, loc);
2701 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2702 tree last = gimple_assign_lhs (g);
2703 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2704 PLUS_EXPR, base_addr, last);
2705 gimple_set_location (g, loc);
2706 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2707 tree base_end_addr = gimple_assign_lhs (g);
2708
2709 tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
2710 shadow_ptr_type);
2711 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2712 gimple_seq seq = NULL;
2713 gimple_seq_add_stmt (&seq, shadow_test);
2714 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2715 base_end_addr, 7));
2716 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
2717 gimple_seq_last (seq)));
2718 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
2719 gimple_seq_last (seq),
2720 shadow));
2721 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2722 gimple_seq_last (seq)));
2723 gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
2724 gimple_seq_last (seq)));
2725 t = gimple_assign_lhs (gimple_seq_last (seq));
2726 gimple_seq_set_location (seq, loc);
2727 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2728 }
2729 }
2730
2731 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
2732 NULL_TREE, NULL_TREE);
2733 gimple_set_location (g, loc);
2734 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2735
2736 /* Generate a call to the run-time library (e.g. __asan_report_load8). */
2737 gsi = gsi_start_bb (then_bb);
2738 int nargs;
2739 tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
2740 g = gimple_build_call (fun, nargs, base_addr, len);
2741 gimple_set_location (g, loc);
2742 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2743
2744 gsi_remove (iter, true);
2745 *iter = gsi_start_bb (else_bb);
2746
2747 return true;
2748 }
2749
2750 /* Instrument the current function. */
2751
2752 static unsigned int
2753 asan_instrument (void)
2754 {
2755 if (shadow_ptr_types[0] == NULL_TREE)
2756 asan_init_shadow_ptr_types ();
2757 transform_statements ();
2758 return 0;
2759 }
2760
2761 static bool
2762 gate_asan (void)
2763 {
2764 return (flag_sanitize & SANITIZE_ADDRESS) != 0
2765 && !lookup_attribute ("no_sanitize_address",
2766 DECL_ATTRIBUTES (current_function_decl));
2767 }
2768
2769 namespace {
2770
2771 const pass_data pass_data_asan =
2772 {
2773 GIMPLE_PASS, /* type */
2774 "asan", /* name */
2775 OPTGROUP_NONE, /* optinfo_flags */
2776 TV_NONE, /* tv_id */
2777 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2778 0, /* properties_provided */
2779 0, /* properties_destroyed */
2780 0, /* todo_flags_start */
2781 TODO_update_ssa, /* todo_flags_finish */
2782 };
2783
2784 class pass_asan : public gimple_opt_pass
2785 {
2786 public:
2787 pass_asan (gcc::context *ctxt)
2788 : gimple_opt_pass (pass_data_asan, ctxt)
2789 {}
2790
2791 /* opt_pass methods: */
2792 opt_pass * clone () { return new pass_asan (m_ctxt); }
2793 virtual bool gate (function *) { return gate_asan (); }
2794 virtual unsigned int execute (function *) { return asan_instrument (); }
2795
2796 }; // class pass_asan
2797
2798 } // anon namespace
2799
2800 gimple_opt_pass *
2801 make_pass_asan (gcc::context *ctxt)
2802 {
2803 return new pass_asan (ctxt);
2804 }
2805
2806 namespace {
2807
2808 const pass_data pass_data_asan_O0 =
2809 {
2810 GIMPLE_PASS, /* type */
2811 "asan0", /* name */
2812 OPTGROUP_NONE, /* optinfo_flags */
2813 TV_NONE, /* tv_id */
2814 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2815 0, /* properties_provided */
2816 0, /* properties_destroyed */
2817 0, /* todo_flags_start */
2818 TODO_update_ssa, /* todo_flags_finish */
2819 };
2820
2821 class pass_asan_O0 : public gimple_opt_pass
2822 {
2823 public:
2824 pass_asan_O0 (gcc::context *ctxt)
2825 : gimple_opt_pass (pass_data_asan_O0, ctxt)
2826 {}
2827
2828 /* opt_pass methods: */
2829 virtual bool gate (function *) { return !optimize && gate_asan (); }
2830 virtual unsigned int execute (function *) { return asan_instrument (); }
2831
2832 }; // class pass_asan_O0
2833
2834 } // anon namespace
2835
2836 gimple_opt_pass *
2837 make_pass_asan_O0 (gcc::context *ctxt)
2838 {
2839 return new pass_asan_O0 (ctxt);
2840 }
2841
2842 #include "gt-asan.h"