gcc/asan.c
1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "gimple.h"
26 #include "tree-iterator.h"
27 #include "tree-flow.h"
28 #include "tree-pass.h"
29 #include "asan.h"
30 #include "gimple-pretty-print.h"
31 #include "target.h"
32 #include "expr.h"
33 #include "optabs.h"
34 #include "output.h"
35 #include "tm_p.h"
36 #include "langhooks.h"
37
38 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
39 with <2x slowdown on average.
40
41 The tool consists of two parts:
42 instrumentation module (this file) and a run-time library.
43 The instrumentation module adds a run-time check before every memory insn.
44 For an 8- or 16-byte load accessing address X:
45 ShadowAddr = (X >> 3) + Offset
46 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
47 if (ShadowValue)
48 __asan_report_load8(X);
49 For a load of N bytes (N=1, 2 or 4) from address X:
50 ShadowAddr = (X >> 3) + Offset
51 ShadowValue = *(char*)ShadowAddr;
52 if (ShadowValue)
53 if ((X & 7) + N - 1 >= ShadowValue)
54 __asan_report_loadN(X);
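
 For example, for a 4-byte load with X & 7 == 2, the last byte
 accessed within the 8-byte shadow granule has index 2 + 4 - 1 == 5.
 If ShadowValue is 6 (the first 6 bytes of the granule are
 addressable), 5 >= 6 is false and the access passes; if ShadowValue
 is 4, 5 >= 4 holds and __asan_report_load4(X) is called.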
55 Stores are instrumented similarly, but using __asan_report_storeN functions.
56 A call to __asan_init() is inserted into the list of module CTORs.
57
58 The run-time library redefines malloc (so that redzones are inserted around
59 the allocated memory) and free (so that reuse of freed memory is delayed),
60 provides __asan_report* and __asan_init functions.
61
62 Read more:
63 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
64
65 The current implementation supports detection of out-of-bounds and
66 use-after-free in the heap, on the stack and for global variables.
67
68 [Protection of stack variables]
69
70 To understand how detection of out-of-bounds and use-after-free works
71 for stack variables, let's look at this example on x86_64 where the
72 stack grows downward:
73
74 int
75 foo ()
76 {
77 char a[23] = {0};
78 int b[2] = {0};
79
80 a[5] = 1;
81 b[1] = 2;
82
83 return a[5] + b[1];
84 }
85
86 For this function, the stack protected by asan will be organized as
87 follows, from the top of the stack to the bottom:
88
89 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
90
91 Slot 2/ [8 bytes of red zone, that pad the space of 'a' so that
92 the next slot is 32-byte aligned; this one is called a Partial
93 Redzone; this 32-byte alignment is an asan constraint]
94
95 Slot 3/ [24 bytes for variable 'a']
96
97 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
98
99 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
100
101 Slot 6/ [8 bytes for variable 'b']
102
103 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
104 'LEFT RedZone']
105
106 The 32 bytes of LEFT red zone at the bottom of the stack can be
107 decomposed as such:
108
109 1/ The first 8 bytes contain a magical asan number that is always
110 0x41B58AB3.
111
112 2/ The following 8 bytes contain a pointer to a string (to be
113 parsed at runtime by the asan runtime library), whose format is
114 the following:
115
116 "<function-name> <space> <num-of-variables-on-the-stack>
117 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
118 <length-of-var-in-bytes> ){n} "
119
120 where '(...){n}' means the content inside the parentheses occurs 'n'
121 times, with 'n' being the number of variables on the stack.
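
 As an illustration for the foo example above, the string could look
 like "foo 2 32 8 1 b 96 23 1 a " (the offsets are hypothetical):
 two variables, 'b' (name of length 1) covering 8 bytes at frame
 offset 32, and 'a' covering 23 bytes at frame offset 96.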
122
123 3/ The following 16 bytes of the red zone have no particular
124 format.
125
126 The shadow memory for that stack layout is going to look like this:
127
128 - content of the 4 bytes of shadow memory for slot 7: 0xF1F1F1F1.
129 The F1 byte pattern is a magic number called
130 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
131 the memory for that shadow byte is part of the LEFT red zone
132 intended to sit at the bottom of the variables on the stack.
133
134 - content of the 4 bytes of shadow memory for slots 6 and 5:
135 0xF4F4F400. The F4 byte pattern is a magic number
136 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
137 memory region for this shadow byte is a PARTIAL red zone
138 intended to pad a variable A, so that the slot following
139 {A,padding} is 32-byte aligned.
140
141 Note that the fact that the least significant byte of this
142 shadow memory content is 00 means that 8 bytes of its
143 corresponding memory (which correspond to the memory of
144 variable 'b') are addressable.
145
146 - content of the 4 bytes of shadow memory for slot 4: 0xF2F2F2F2.
147 The F2 byte pattern is a magic number called
148 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
149 region for this shadow byte is a MIDDLE red zone intended to
150 sit between two 32-byte aligned slots of {variable,padding}.
151
152 - content of the 4 bytes of shadow memory for slots 3 and 2:
153 0xF4000000. This represents the concatenation of
154 variable 'a' and the partial red zone following it, like what we
155 had for variable 'b'. The least significant 3 bytes being 00
156 means that the 24 bytes of the slot of variable 'a' are addressable.
157
158 - content of the 4 bytes of shadow memory for slot 1: 0xF3F3F3F3.
159 The F3 byte pattern is a magic number called
160 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
161 region for this shadow byte is a RIGHT red zone intended to sit
162 at the top of the variables of the stack.
163
164 Note that the real variable layout is done in expand_used_vars in
165 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
166 stack variables as well as the different red zones, emits some
167 prologue code to populate the shadow memory so as to poison (mark
168 as non-accessible) the regions of the red zones and mark the regions
169 of stack variables as accessible, and emits some epilogue code to
170 un-poison (mark as accessible) the regions of red zones right before
171 the function exits.
172
173 [Protection of global variables]
174
175 The basic idea is to insert a red zone between two global variables
176 and install a constructor function that calls the asan runtime to
177 populate the relevant shadow memory regions at load time.
178 
179 So the global variables are laid out with a red zone inserted between
180 them. The red zone sizes are chosen so that each variable starts on
181 a 32-byte boundary.
182
183 Then a constructor function is installed so that, for each global
184 variable, it calls the runtime asan library function
185 __asan_register_globals with an instance of this type:
186
187 struct __asan_global
188 {
189 // Address of the beginning of the global variable.
190 const void *__beg;
191
192 // Initial size of the global variable.
193 uptr __size;
194
195 // Size of the global variable + size of the red zone. This
196 // size is 32-byte aligned.
197 uptr __size_with_redzone;
198
199 // Name of the global variable.
200 const void *__name;
201
202 // This is always set to zero for now.
203 uptr __has_dynamic_init;
204 }
205
206 A destructor function that calls the runtime asan library function
207 __asan_unregister_globals is also installed. */
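
/* For illustration, the constructor/destructor pair emitted by
   asan_finish_file below is morally equivalent to this C sketch
   (variable names, sizes and counts are hypothetical):

     static struct __asan_global ga[2]
       = { { &v1, 8, 32, "v1 (file.c)", 0 },
           { &v2, 40, 64, "v2 (file.c)", 0 } };
     static void ctor (void)
     {
       __asan_init ();
       __asan_register_globals (ga, 2);
     }
     static void dtor (void)
     {
       __asan_unregister_globals (ga, 2);
     }  */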
208
209 alias_set_type asan_shadow_set = -1;
210
211 /* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
212 alias set is used for all shadow memory accesses. */
213 static GTY(()) tree shadow_ptr_types[2];
214
215 /* Initialize shadow_ptr_types array. */
216
217 static void
218 asan_init_shadow_ptr_types (void)
219 {
220 asan_shadow_set = new_alias_set ();
221 shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
222 TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
223 shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
224 shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
225 TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
226 shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
227 initialize_sanitizer_builtins ();
228 }
229
230 /* Asan pretty-printer, used for building the description STRING_CSTs. */
231 static pretty_printer asan_pp;
232 static bool asan_pp_initialized;
233
234 /* Initialize asan_pp. */
235
236 static void
237 asan_pp_initialize (void)
238 {
239 pp_construct (&asan_pp, /* prefix */NULL, /* line-width */0);
240 asan_pp_initialized = true;
241 }
242
243 /* Create ADDR_EXPR of STRING_CST with asan_pp text. */
244
245 static tree
246 asan_pp_string (void)
247 {
248 const char *buf = pp_base_formatted_text (&asan_pp);
249 size_t len = strlen (buf);
250 tree ret = build_string (len + 1, buf);
251 TREE_TYPE (ret)
252 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
253 build_index_type (size_int (len)));
254 TREE_READONLY (ret) = 1;
255 TREE_STATIC (ret) = 1;
256 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
257 }
258
259 /* Return a CONST_INT representing 4 consecutive shadow memory bytes. */
260
261 static rtx
262 asan_shadow_cst (unsigned char shadow_bytes[4])
263 {
264 int i;
265 unsigned HOST_WIDE_INT val = 0;
266 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
267 for (i = 0; i < 4; i++)
268 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
269 << (BITS_PER_UNIT * i);
270 return GEN_INT (trunc_int_for_mode (val, SImode));
271 }
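
/* For instance, the four shadow bytes { 0x00, 0x00, 0x00, 0xF4 } from
   the file-header example yield the SImode constant 0xF4000000 on a
   little-endian target; on big-endian targets the loop above indexes
   the array in reverse so that the in-memory byte order stays the
   same.  */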
272
273 /* Insert code to protect stack vars. The prologue sequence should be emitted
274 directly, epilogue sequence returned. BASE is the register holding the
275 stack base, relative to which the offsets in the OFFSETS array are
276 expressed. The OFFSETS array contains pairs of offsets in reverse order,
277 always the end offset of some gap that needs protection followed by its
278 starting offset, and DECLS is an array of representative decls for each
279 var partition. LENGTH is the length of the OFFSETS array, the DECLS
280 array is LENGTH / 2 - 1 elements long (OFFSETS include the gap before the
281 first variable as well as gaps after each stack variable). */
282
283 rtx
284 asan_emit_stack_protection (rtx base, HOST_WIDE_INT *offsets, tree *decls,
285 int length)
286 {
287 rtx shadow_base, shadow_mem, ret, mem;
288 unsigned char shadow_bytes[4];
289 HOST_WIDE_INT base_offset = offsets[length - 1], offset, prev_offset;
290 HOST_WIDE_INT last_offset, last_size;
291 int l;
292 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
293 tree str_cst;
294
295 if (shadow_ptr_types[0] == NULL_TREE)
296 asan_init_shadow_ptr_types ();
297
298 /* First of all, prepare the description string. */
299 if (!asan_pp_initialized)
300 asan_pp_initialize ();
301
302 pp_clear_output_area (&asan_pp);
303 if (DECL_NAME (current_function_decl))
304 pp_base_tree_identifier (&asan_pp, DECL_NAME (current_function_decl));
305 else
306 pp_string (&asan_pp, "<unknown>");
307 pp_space (&asan_pp);
308 pp_decimal_int (&asan_pp, length / 2 - 1);
309 pp_space (&asan_pp);
310 for (l = length - 2; l; l -= 2)
311 {
312 tree decl = decls[l / 2 - 1];
313 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
314 pp_space (&asan_pp);
315 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
316 pp_space (&asan_pp);
317 if (DECL_P (decl) && DECL_NAME (decl))
318 {
319 pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
320 pp_space (&asan_pp);
321 pp_base_tree_identifier (&asan_pp, DECL_NAME (decl));
322 }
323 else
324 pp_string (&asan_pp, "9 <unknown>");
325 pp_space (&asan_pp);
326 }
327 str_cst = asan_pp_string ();
328
329 /* Emit the prologue sequence. */
330 base = expand_binop (Pmode, add_optab, base, GEN_INT (base_offset),
331 NULL_RTX, 1, OPTAB_DIRECT);
332 mem = gen_rtx_MEM (ptr_mode, base);
333 emit_move_insn (mem, GEN_INT (ASAN_STACK_FRAME_MAGIC));
334 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
335 emit_move_insn (mem, expand_normal (str_cst));
336 shadow_base = expand_binop (Pmode, lshr_optab, base,
337 GEN_INT (ASAN_SHADOW_SHIFT),
338 NULL_RTX, 1, OPTAB_DIRECT);
339 shadow_base = expand_binop (Pmode, add_optab, shadow_base,
340 GEN_INT (targetm.asan_shadow_offset ()),
341 NULL_RTX, 1, OPTAB_DIRECT);
342 gcc_assert (asan_shadow_set != -1
343 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
344 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
345 set_mem_alias_set (shadow_mem, asan_shadow_set);
346 prev_offset = base_offset;
347 for (l = length; l; l -= 2)
348 {
349 if (l == 2)
350 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
351 offset = offsets[l - 1];
352 if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
353 {
354 int i;
355 HOST_WIDE_INT aoff
356 = base_offset + ((offset - base_offset)
357 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
358 shadow_mem = adjust_address (shadow_mem, VOIDmode,
359 (aoff - prev_offset)
360 >> ASAN_SHADOW_SHIFT);
361 prev_offset = aoff;
362 for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
363 if (aoff < offset)
364 {
365 if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
366 shadow_bytes[i] = 0;
367 else
368 shadow_bytes[i] = offset - aoff;
369 }
370 else
371 shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
372 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
373 offset = aoff;
374 }
375 while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
376 {
377 shadow_mem = adjust_address (shadow_mem, VOIDmode,
378 (offset - prev_offset)
379 >> ASAN_SHADOW_SHIFT);
380 prev_offset = offset;
381 memset (shadow_bytes, cur_shadow_byte, 4);
382 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
383 offset += ASAN_RED_ZONE_SIZE;
384 }
385 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
386 }
387 do_pending_stack_adjust ();
388
389 /* Construct epilogue sequence. */
390 start_sequence ();
391
392 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
393 set_mem_alias_set (shadow_mem, asan_shadow_set);
394 prev_offset = base_offset;
395 last_offset = base_offset;
396 last_size = 0;
397 for (l = length; l; l -= 2)
398 {
399 offset = base_offset + ((offsets[l - 1] - base_offset)
400 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
401 if (last_offset + last_size != offset)
402 {
403 shadow_mem = adjust_address (shadow_mem, VOIDmode,
404 (last_offset - prev_offset)
405 >> ASAN_SHADOW_SHIFT);
406 prev_offset = last_offset;
407 clear_storage (shadow_mem, GEN_INT (last_size >> ASAN_SHADOW_SHIFT),
408 BLOCK_OP_NORMAL);
409 last_offset = offset;
410 last_size = 0;
411 }
412 last_size += base_offset + ((offsets[l - 2] - base_offset)
413 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
414 - offset;
415 }
416 if (last_size)
417 {
418 shadow_mem = adjust_address (shadow_mem, VOIDmode,
419 (last_offset - prev_offset)
420 >> ASAN_SHADOW_SHIFT);
421 clear_storage (shadow_mem, GEN_INT (last_size >> ASAN_SHADOW_SHIFT),
422 BLOCK_OP_NORMAL);
423 }
424
425 do_pending_stack_adjust ();
426
427 ret = get_insns ();
428 end_sequence ();
429 return ret;
430 }
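
/* To illustrate the code emitted above for the file-header example:
   the prologue stores 0x41B58AB3 (ASAN_STACK_FRAME_MAGIC) and the
   address of the description string at the frame base, then writes
   one SImode shadow constant per 32-byte red zone, e.g. 0xF1F1F1F1
   (LEFT), 0xF2F2F2F2 (MIDDLE) and 0xF3F3F3F3 (RIGHT); the returned
   epilogue sequence clears those shadow words again before the
   function exits.  */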
431
432 /* Return true if DECL, a global var, might be overridden and
433 therefore needs a local alias. */
434
435 static bool
436 asan_needs_local_alias (tree decl)
437 {
438 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
439 }
440
441 /* Return true if DECL is a VAR_DECL that should be protected
442 by Address Sanitizer, by appending a red zone with protected
443 shadow memory after it and aligning it to at least
444 ASAN_RED_ZONE_SIZE bytes. */
445
446 bool
447 asan_protect_global (tree decl)
448 {
449 rtx rtl, symbol;
450
451 if (TREE_CODE (decl) == STRING_CST)
452 {
453 /* Instrument all STRING_CSTs except those created
454 by asan_pp_string here. */
455 if (shadow_ptr_types[0] != NULL_TREE
456 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
457 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
458 return false;
459 return true;
460 }
461 if (TREE_CODE (decl) != VAR_DECL
462 /* TLS vars aren't statically protectable. */
463 || DECL_THREAD_LOCAL_P (decl)
464 /* Externs will be protected elsewhere. */
465 || DECL_EXTERNAL (decl)
466 || !TREE_ASM_WRITTEN (decl)
467 || !DECL_RTL_SET_P (decl)
468 /* Comdat vars pose an ABI problem, we can't know if
469 the var that is selected by the linker will have
470 padding or not. */
471 || DECL_ONE_ONLY (decl)
472 /* Similarly for common vars. People can use -fno-common. */
473 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
474 /* Don't protect if using user section, often vars placed
475 into user section from multiple TUs are then assumed
476 to be an array of such vars, putting padding in there
477 breaks this assumption. */
478 || (DECL_SECTION_NAME (decl) != NULL_TREE
479 && !DECL_HAS_IMPLICIT_SECTION_NAME_P (decl))
480 || DECL_SIZE (decl) == 0
481 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
482 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
483 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE)
484 return false;
485
486 rtl = DECL_RTL (decl);
487 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
488 return false;
489 symbol = XEXP (rtl, 0);
490
491 if (CONSTANT_POOL_ADDRESS_P (symbol)
492 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
493 return false;
494
495 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
496 return false;
497
498 #ifndef ASM_OUTPUT_DEF
499 if (asan_needs_local_alias (decl))
500 return false;
501 #endif
502
503 return true;
504 }
505
506 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16}.
507 IS_STORE is either 1 (for a store) or 0 (for a load).
508 SIZE_IN_BYTES is one of 1, 2, 4, 8, 16. */
509
510 static tree
511 report_error_func (bool is_store, int size_in_bytes)
512 {
513 static enum built_in_function report[2][5]
514 = { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
515 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
516 BUILT_IN_ASAN_REPORT_LOAD16 },
517 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
518 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
519 BUILT_IN_ASAN_REPORT_STORE16 } };
520 return builtin_decl_implicit (report[is_store][exact_log2 (size_in_bytes)]);
521 }
522
523 #define PROB_VERY_UNLIKELY (REG_BR_PROB_BASE / 2000 - 1)
524 #define PROB_ALWAYS (REG_BR_PROB_BASE)
525
526 /* Split the current basic block and create a condition statement
527 insertion point right before or after the statement pointed to by
528 ITER. Return an iterator to the point at which the caller might
529 safely insert the condition statement.
530
531 THEN_BLOCK must be set to the address of an uninitialized instance
532 of basic_block. The function will then set *THEN_BLOCK to the
533 'then block' of the condition statement to be inserted by the
534 caller.
535
536 Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
537 block' of the condition statement to be inserted by the caller.
538
539 Note that *FALLTHROUGH_BLOCK is a new block that contains the
540 statements starting from *ITER, and *THEN_BLOCK is a new empty
541 block.
542
543 *ITER is adjusted to always point to the first statement of the
544 basic block *FALLTHROUGH_BLOCK. That statement is the same as what
545 ITER was pointing to prior to calling this function, if BEFORE_P is
546 true; otherwise, it is its following statement. */
547
548 static gimple_stmt_iterator
549 create_cond_insert_point (gimple_stmt_iterator *iter,
550 bool before_p,
551 bool then_more_likely_p,
552 basic_block *then_block,
553 basic_block *fallthrough_block)
554 {
555 gimple_stmt_iterator gsi = *iter;
556
557 if (!gsi_end_p (gsi) && before_p)
558 gsi_prev (&gsi);
559
560 basic_block cur_bb = gsi_bb (*iter);
561
562 edge e = split_block (cur_bb, gsi_stmt (gsi));
563
564 /* Get a hold on the 'condition block', the 'then block' and the
565 'else block'. */
566 basic_block cond_bb = e->src;
567 basic_block fallthru_bb = e->dest;
568 basic_block then_bb = create_empty_bb (cond_bb);
569
570 /* Set up the newly created 'then block'. */
571 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
572 int fallthrough_probability
573 = then_more_likely_p
574 ? PROB_VERY_UNLIKELY
575 : PROB_ALWAYS - PROB_VERY_UNLIKELY;
576 e->probability = PROB_ALWAYS - fallthrough_probability;
577 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
578
579 /* Set up the fallthrough basic block. */
580 e = find_edge (cond_bb, fallthru_bb);
581 e->flags = EDGE_FALSE_VALUE;
582 e->count = cond_bb->count;
583 e->probability = fallthrough_probability;
584
585 /* Update dominance info for the newly created then_bb; note that
586 fallthru_bb's dominance info has already been updated by
587 split_block. */
588 if (dom_info_available_p (CDI_DOMINATORS))
589 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
590
591 *then_block = then_bb;
592 *fallthrough_block = fallthru_bb;
593 *iter = gsi_start_bb (fallthru_bb);
594
595 return gsi_last_bb (cond_bb);
596 }
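
/* Pictorially, for the BEFORE_P case the transformation above is:

     cur_bb: ...; STMT; ...   ==>   cond_bb: ...  <- returned insert point
                                      |      \
                                      |       then_bb (new, empty)
                                      |      /
                                    fallthru_bb: STMT; ...  */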
597
598 /* Insert an if condition followed by a 'then block' right before the
599 statement pointed to by ITER. The fallthrough block -- which is the
600 else block of the condition as well as the destination of the
601 outgoing edge of the 'then block' -- starts with the statement
602 pointed to by ITER.
603
604 COND is the condition of the if.
605
606 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
607 'then block' is higher than the probability of the edge to the
608 fallthrough block.
609
610 Upon completion of the function, *THEN_BB is set to the newly
611 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
612 fallthrough block.
613
614 *ITER is adjusted to still point to the same statement it was
615 pointing to initially. */
616
617 static void
618 insert_if_then_before_iter (gimple cond,
619 gimple_stmt_iterator *iter,
620 bool then_more_likely_p,
621 basic_block *then_bb,
622 basic_block *fallthrough_bb)
623 {
624 gimple_stmt_iterator cond_insert_point =
625 create_cond_insert_point (iter,
626 /*before_p=*/true,
627 then_more_likely_p,
628 then_bb,
629 fallthrough_bb);
630 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
631 }
632
633 /* Instrument the memory access instruction BASE. Insert new
634 statements before or after ITER.
635
636 Note that the memory access represented by BASE can be either an
637 SSA_NAME, or a non-SSA expression. LOCATION is the source code
638 location. IS_STORE is TRUE for a store, FALSE for a load.
639 BEFORE_P is TRUE for inserting the instrumentation code before
640 ITER, FALSE for inserting it after ITER. SIZE_IN_BYTES is one of
641 1, 2, 4, 8, 16.
642
643 If BEFORE_P is TRUE, *ITER is arranged to still point to the
644 statement it was pointing to prior to calling this function,
645 otherwise, it points to the statement logically following it. */
646
647 static void
648 build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
649 bool before_p, bool is_store, int size_in_bytes)
650 {
651 gimple_stmt_iterator gsi;
652 basic_block then_bb, else_bb;
653 tree t, base_addr, shadow;
654 gimple g;
655 tree shadow_ptr_type = shadow_ptr_types[size_in_bytes == 16 ? 1 : 0];
656 tree shadow_type = TREE_TYPE (shadow_ptr_type);
657 tree uintptr_type
658 = build_nonstandard_integer_type (TYPE_PRECISION (TREE_TYPE (base)), 1);
659 tree base_ssa = base;
660
661 /* Get an iterator on the point where we can add the condition
662 statement for the instrumentation. */
663 gsi = create_cond_insert_point (iter, before_p,
664 /*then_more_likely_p=*/false,
665 &then_bb,
666 &else_bb);
667
668 base = unshare_expr (base);
669
670 /* BASE can already be an SSA_NAME; in that case, do not create a
671 new SSA_NAME for it. */
672 if (TREE_CODE (base) != SSA_NAME)
673 {
674 g = gimple_build_assign_with_ops (TREE_CODE (base),
675 make_ssa_name (TREE_TYPE (base), NULL),
676 base, NULL_TREE);
677 gimple_set_location (g, location);
678 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
679 base_ssa = gimple_assign_lhs (g);
680 }
681
682 g = gimple_build_assign_with_ops (NOP_EXPR,
683 make_ssa_name (uintptr_type, NULL),
684 base_ssa, NULL_TREE);
685 gimple_set_location (g, location);
686 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
687 base_addr = gimple_assign_lhs (g);
688
689 /* Build
690 (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset (). */
691
692 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
693 g = gimple_build_assign_with_ops (RSHIFT_EXPR,
694 make_ssa_name (uintptr_type, NULL),
695 base_addr, t);
696 gimple_set_location (g, location);
697 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
698
699 t = build_int_cst (uintptr_type, targetm.asan_shadow_offset ());
700 g = gimple_build_assign_with_ops (PLUS_EXPR,
701 make_ssa_name (uintptr_type, NULL),
702 gimple_assign_lhs (g), t);
703 gimple_set_location (g, location);
704 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
705
706 g = gimple_build_assign_with_ops (NOP_EXPR,
707 make_ssa_name (shadow_ptr_type, NULL),
708 gimple_assign_lhs (g), NULL_TREE);
709 gimple_set_location (g, location);
710 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
711
712 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
713 build_int_cst (shadow_ptr_type, 0));
714 g = gimple_build_assign_with_ops (MEM_REF,
715 make_ssa_name (shadow_type, NULL),
716 t, NULL_TREE);
717 gimple_set_location (g, location);
718 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
719 shadow = gimple_assign_lhs (g);
720
721 if (size_in_bytes < 8)
722 {
723 /* Slow path for 1, 2 and 4 byte accesses.
724 Test (shadow != 0)
725 & ((base_addr & 7) + (size_in_bytes - 1) >= shadow). */
726 g = gimple_build_assign_with_ops (NE_EXPR,
727 make_ssa_name (boolean_type_node,
728 NULL),
729 shadow,
730 build_int_cst (shadow_type, 0));
731 gimple_set_location (g, location);
732 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
733 t = gimple_assign_lhs (g);
734
735 g = gimple_build_assign_with_ops (BIT_AND_EXPR,
736 make_ssa_name (uintptr_type,
737 NULL),
738 base_addr,
739 build_int_cst (uintptr_type, 7));
740 gimple_set_location (g, location);
741 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
742
743 g = gimple_build_assign_with_ops (NOP_EXPR,
744 make_ssa_name (shadow_type,
745 NULL),
746 gimple_assign_lhs (g), NULL_TREE);
747 gimple_set_location (g, location);
748 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
749
750 if (size_in_bytes > 1)
751 {
752 g = gimple_build_assign_with_ops (PLUS_EXPR,
753 make_ssa_name (shadow_type,
754 NULL),
755 gimple_assign_lhs (g),
756 build_int_cst (shadow_type,
757 size_in_bytes - 1));
758 gimple_set_location (g, location);
759 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
760 }
761
762 g = gimple_build_assign_with_ops (GE_EXPR,
763 make_ssa_name (boolean_type_node,
764 NULL),
765 gimple_assign_lhs (g),
766 shadow);
767 gimple_set_location (g, location);
768 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
769
770 g = gimple_build_assign_with_ops (BIT_AND_EXPR,
771 make_ssa_name (boolean_type_node,
772 NULL),
773 t, gimple_assign_lhs (g));
774 gimple_set_location (g, location);
775 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
776 t = gimple_assign_lhs (g);
777 }
778 else
779 t = shadow;
780
781 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
782 NULL_TREE, NULL_TREE);
783 gimple_set_location (g, location);
784 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
785
786 /* Generate call to the run-time library (e.g. __asan_report_load8). */
787 gsi = gsi_start_bb (then_bb);
788 g = gimple_build_call (report_error_func (is_store, size_in_bytes),
789 1, base_addr);
790 gimple_set_location (g, location);
791 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
792
793 *iter = gsi_start_bb (else_bb);
794 }
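
/* For reference, for a 4-byte load from an address P the sequence
   built above corresponds to GIMPLE along these lines (SSA names,
   the uintptr type and the shadow offset are illustrative):

     _1 = (unsigned long) P;
     _2 = _1 >> 3;
     _3 = _2 + <targetm.asan_shadow_offset ()>;
     _4 = (signed char *) _3;
     _5 = *_4;
     _6 = _5 != 0;
     _7 = _1 & 7;
     _8 = (signed char) _7;
     _9 = _8 + 3;
     _10 = _9 >= _5;
     _11 = _6 & _10;
     if (_11 != 0)
       __asan_report_load4 (_1);  */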
795
796 /* If T represents a memory access, add instrumentation code before ITER.
797 LOCATION is source code location.
798 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
799
800 static void
801 instrument_derefs (gimple_stmt_iterator *iter, tree t,
802 location_t location, bool is_store)
803 {
804 tree type, base;
805 HOST_WIDE_INT size_in_bytes;
806
807 type = TREE_TYPE (t);
808 switch (TREE_CODE (t))
809 {
810 case ARRAY_REF:
811 case COMPONENT_REF:
812 case INDIRECT_REF:
813 case MEM_REF:
814 break;
815 default:
816 return;
817 }
818
819 size_in_bytes = int_size_in_bytes (type);
820 if ((size_in_bytes & (size_in_bytes - 1)) != 0
821 || (unsigned HOST_WIDE_INT) size_in_bytes - 1 >= 16)
822 return;
823
824 HOST_WIDE_INT bitsize, bitpos;
825 tree offset;
826 enum machine_mode mode;
827 int volatilep = 0, unsignedp = 0;
828 get_inner_reference (t, &bitsize, &bitpos, &offset,
829 &mode, &unsignedp, &volatilep, false);
830 if (bitpos % (size_in_bytes * BITS_PER_UNIT)
831 || bitsize != size_in_bytes * BITS_PER_UNIT)
832 {
833 if (TREE_CODE (t) == COMPONENT_REF
834 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
835 {
836 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
837 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
838 TREE_OPERAND (t, 0), repr,
839 NULL_TREE), location, is_store);
840 }
841 return;
842 }
843
844 base = build_fold_addr_expr (t);
845 build_check_stmt (location, base, iter, /*before_p=*/true,
846 is_store, size_in_bytes);
847 }
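
/* E.g. for an assignment a[i] = x where the elements of 'a' are
   4-byte ints, the code above arranges for a 4-byte store check on
   &a[i] to be emitted right before the assignment.  */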
848
849 /* Instrument an access to a contiguous memory region that starts at
850 the address pointed to by BASE, over a length of LEN (expressed in
851 the sizeof (*BASE) bytes). ITER points to the instruction before
852 which the instrumentation instructions must be inserted. LOCATION
853 is the source location that the instrumentation instructions must
854 have. If IS_STORE is true, then the memory access is a store;
855 otherwise, it's a load. */
856
857 static void
858 instrument_mem_region_access (tree base, tree len,
859 gimple_stmt_iterator *iter,
860 location_t location, bool is_store)
861 {
862 if (!POINTER_TYPE_P (TREE_TYPE (base))
863 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
864 || integer_zerop (len))
865 return;
866
867 gimple_stmt_iterator gsi = *iter;
868
869 basic_block fallthrough_bb = NULL, then_bb = NULL;
870 if (!is_gimple_constant (len))
871 {
872 /* So, the length of the memory area to asan-protect is
873 non-constant. Let's guard the generated instrumentation code
874 like:
875
876 if (len != 0)
877 {
878 //asan instrumentation code goes here.
879 }
880 // fallthrough instructions, starting with *ITER. */
881
882 gimple g = gimple_build_cond (NE_EXPR,
883 len,
884 build_int_cst (TREE_TYPE (len), 0),
885 NULL_TREE, NULL_TREE);
886 gimple_set_location (g, location);
887 insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
888 &then_bb, &fallthrough_bb);
889 /* Note that fallthrough_bb starts with the statement that was
890 pointed to by ITER. */
891
892 /* The 'then block' of the 'if (len != 0)' condition is where
893 we'll generate the asan instrumentation code now. */
894 gsi = gsi_start_bb (then_bb);
895 }
896
897 /* Instrument the beginning of the memory region to be accessed,
898 and arrange for the rest of the instrumentation code to be
899 inserted in the then block *after* the current gsi. */
900 build_check_stmt (location, base, &gsi, /*before_p=*/true, is_store, 1);
901
902 if (then_bb)
903 /* We are in the case where the length of the region is not
904 constant; so instrumentation code is being generated in the
905 'then block' of the 'if (len != 0)' condition. Let's arrange
906 for the subsequent instrumentation statements to go in the
907 'then block'. */
908 gsi = gsi_last_bb (then_bb);
909 else
910 *iter = gsi;
911
912 /* We want to instrument the access at the end of the memory region,
913 which is at (base + len - 1). */
914
915 /* offset = len - 1; */
916 len = unshare_expr (len);
917 tree offset;
918 gimple_seq seq = NULL;
919 if (TREE_CODE (len) == INTEGER_CST)
920 offset = fold_build2 (MINUS_EXPR, size_type_node,
921 fold_convert (size_type_node, len),
922 build_int_cst (size_type_node, 1));
923 else
924 {
925 gimple g;
926 tree t;
927
928 if (TREE_CODE (len) != SSA_NAME)
929 {
930 t = make_ssa_name (TREE_TYPE (len), NULL);
931 g = gimple_build_assign_with_ops (TREE_CODE (len), t, len, NULL);
932 gimple_set_location (g, location);
933 gimple_seq_add_stmt_without_update (&seq, g);
934 len = t;
935 }
936 if (!useless_type_conversion_p (size_type_node, TREE_TYPE (len)))
937 {
938 t = make_ssa_name (size_type_node, NULL);
939 g = gimple_build_assign_with_ops (NOP_EXPR, t, len, NULL);
940 gimple_set_location (g, location);
941 gimple_seq_add_stmt_without_update (&seq, g);
942 len = t;
943 }
944
945 t = make_ssa_name (size_type_node, NULL);
946 g = gimple_build_assign_with_ops (MINUS_EXPR, t, len,
947 build_int_cst (size_type_node, 1));
948 gimple_set_location (g, location);
949 gimple_seq_add_stmt_without_update (&seq, g);
950 offset = gimple_assign_lhs (g);
951 }
952
953 /* _1 = base; */
954 base = unshare_expr (base);
955 gimple region_end =
956 gimple_build_assign_with_ops (TREE_CODE (base),
957 make_ssa_name (TREE_TYPE (base), NULL),
958 base, NULL);
959 gimple_set_location (region_end, location);
960 gimple_seq_add_stmt_without_update (&seq, region_end);
961 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
962 gsi_prev (&gsi);
963
964 /* _2 = _1 + offset; */
965 region_end =
966 gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
967 make_ssa_name (TREE_TYPE (base), NULL),
968 gimple_assign_lhs (region_end),
969 offset);
970 gimple_set_location (region_end, location);
971 gsi_insert_after (&gsi, region_end, GSI_NEW_STMT);
972
973 /* instrument access at _2; */
974 build_check_stmt (location, gimple_assign_lhs (region_end),
975 &gsi, /*before_p=*/false, is_store, 1);
976 }
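
/* For example, for a call like memset (p, c, n) with a non-constant N,
   the code above guards the instrumentation with if (n != 0) and then
   checks the shadow for p[0] and for p[n - 1], the first and the last
   byte of the accessed region.  */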
977
978 /* Instrument the call (to the builtin strlen function) pointed to by
979 ITER.
980
981 This function instruments the access to the first byte of the
982 argument, right before the call. After the call it instruments the
983 access to the last byte of the argument; it uses the result of the
984 call to deduce the offset of that last byte.
985
986 Upon completion, iff the call has actually been instrumented, this
987 function returns TRUE and *ITER points to the statement logically
988 following the built-in strlen function call *ITER was initially
989 pointing to. Otherwise, the function returns FALSE and *ITER
990 remains unchanged. */
991
992 static bool
993 instrument_strlen_call (gimple_stmt_iterator *iter)
994 {
995 gimple call = gsi_stmt (*iter);
996 gcc_assert (is_gimple_call (call));
997
998 tree callee = gimple_call_fndecl (call);
999 gcc_assert (is_builtin_fn (callee)
1000 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
1001 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN);
1002
1003 tree len = gimple_call_lhs (call);
1004 if (len == NULL)
1005 /* Some passes might clear the return value of the strlen call;
1006 bail out in that case. Return FALSE as we are not advancing
1007 *ITER. */
1008 return false;
1009 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (len)));
1010
1011 location_t loc = gimple_location (call);
1012 tree str_arg = gimple_call_arg (call, 0);
1013
1014 /* Instrument the access to the first byte of str_arg, i.e.:
1015
1016 _1 = str_arg; instrument (_1); */
1017 gimple str_arg_ssa =
1018 gimple_build_assign_with_ops (NOP_EXPR,
1019 make_ssa_name (build_pointer_type
1020 (char_type_node), NULL),
1021 str_arg, NULL);
1022 gimple_set_location (str_arg_ssa, loc);
1023 gimple_stmt_iterator gsi = *iter;
1024 gsi_insert_before (&gsi, str_arg_ssa, GSI_NEW_STMT);
1025 build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa), &gsi,
1026 /*before_p=*/false, /*is_store=*/false, 1);
1027
1028 /* If we initially had an instruction like:
1029 
1030 int n = strlen (str);
1031 
1032 we now want to instrument the access to str[n], after the
1033 instruction above. */
1034
1035 /* So let's build the access to str[n] that is, access through the
1036 pointer_plus expr: (_1 + len). */
1037 gimple stmt =
1038 gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
1039 make_ssa_name (TREE_TYPE (str_arg),
1040 NULL),
1041 gimple_assign_lhs (str_arg_ssa),
1042 len);
1043 gimple_set_location (stmt, loc);
1044 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
1045
1046 build_check_stmt (loc, gimple_assign_lhs (stmt), &gsi,
1047 /*before_p=*/false, /*is_store=*/false, 1);
1048
1049 /* Ensure that iter points to the statement logically following the
1050 one it was initially pointing to. */
1051 *iter = gsi;
1052 /* As *ITER has been advanced to point to the next statement, let's
1053 return true to inform transform_statements that it shouldn't
1054 advance *ITER anymore; otherwise it would skip that next
1055 statement, which wouldn't be instrumented. */
1056 return true;
1057 }
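
/* So for a statement like n_5 = strlen (str_1), both str_1[0] (checked
   before the call) and str_1[n_5], the terminating NUL byte (checked
   after the call), end up instrumented as 1-byte loads.  */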
1058
1059 /* Instrument the call to a built-in memory access function that is
1060 pointed to by the iterator ITER.
1061
1062 Upon completion, return TRUE iff *ITER has been advanced to the
1063 statement following the one it was originally pointing to. */
1064
1065 static bool
1066 instrument_builtin_call (gimple_stmt_iterator *iter)
1067 {
1068 gimple call = gsi_stmt (*iter);
1069
1070 gcc_checking_assert (is_gimple_builtin_call (call));
1071
1072 tree callee = gimple_call_fndecl (call);
1073 location_t loc = gimple_location (call);
1074 tree source0 = NULL_TREE, source1 = NULL_TREE,
1075 dest = NULL_TREE, len = NULL_TREE;
1076 bool is_store = true;
1077
1078 switch (DECL_FUNCTION_CODE (callee))
1079 {
1080 /* (s, s, n) style memops. */
1081 case BUILT_IN_BCMP:
1082 case BUILT_IN_MEMCMP:
1083 source0 = gimple_call_arg (call, 0);
1084 source1 = gimple_call_arg (call, 1);
1085 len = gimple_call_arg (call, 2);
1086 break;
1087
1088 /* (src, dest, n) style memops. */
1089 case BUILT_IN_BCOPY:
1090 source0 = gimple_call_arg (call, 0);
1091 dest = gimple_call_arg (call, 1);
1092 len = gimple_call_arg (call, 2);
1093 break;
1094
1095 /* (dest, src, n) style memops. */
1096 case BUILT_IN_MEMCPY:
1097 case BUILT_IN_MEMCPY_CHK:
1098 case BUILT_IN_MEMMOVE:
1099 case BUILT_IN_MEMMOVE_CHK:
1100 case BUILT_IN_MEMPCPY:
1101 case BUILT_IN_MEMPCPY_CHK:
1102 dest = gimple_call_arg (call, 0);
1103 source0 = gimple_call_arg (call, 1);
1104 len = gimple_call_arg (call, 2);
1105 break;
1106
1107 /* (dest, n) style memops. */
1108 case BUILT_IN_BZERO:
1109 dest = gimple_call_arg (call, 0);
1110 len = gimple_call_arg (call, 1);
1111 break;
1112
1113 /* (dest, x, n) style memops. */
1114 case BUILT_IN_MEMSET:
1115 case BUILT_IN_MEMSET_CHK:
1116 dest = gimple_call_arg (call, 0);
1117 len = gimple_call_arg (call, 2);
1118 break;
1119
1120 case BUILT_IN_STRLEN:
1121 return instrument_strlen_call (iter);
1122
1123 /* And now the __atomic* and __sync builtins.
1124 These are handled differently from the classical memory
1125 access builtins above. */
1126
1127 case BUILT_IN_ATOMIC_LOAD_1:
1128 case BUILT_IN_ATOMIC_LOAD_2:
1129 case BUILT_IN_ATOMIC_LOAD_4:
1130 case BUILT_IN_ATOMIC_LOAD_8:
1131 case BUILT_IN_ATOMIC_LOAD_16:
1132 is_store = false;
1133 /* fall through. */
1134
1135 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
1136 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
1137 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
1138 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
1139 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
1140
1141 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
1142 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
1143 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
1144 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
1145 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
1146
1147 case BUILT_IN_SYNC_FETCH_AND_OR_1:
1148 case BUILT_IN_SYNC_FETCH_AND_OR_2:
1149 case BUILT_IN_SYNC_FETCH_AND_OR_4:
1150 case BUILT_IN_SYNC_FETCH_AND_OR_8:
1151 case BUILT_IN_SYNC_FETCH_AND_OR_16:
1152
1153 case BUILT_IN_SYNC_FETCH_AND_AND_1:
1154 case BUILT_IN_SYNC_FETCH_AND_AND_2:
1155 case BUILT_IN_SYNC_FETCH_AND_AND_4:
1156 case BUILT_IN_SYNC_FETCH_AND_AND_8:
1157 case BUILT_IN_SYNC_FETCH_AND_AND_16:
1158
1159 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
1160 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
1161 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
1162 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
1163 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
1164
1165 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
1166 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
1167 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
1168 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
1169
1170 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
1171 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
1172 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
1173 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
1174 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
1175
1176 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
1177 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
1178 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
1179 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
1180 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
1181
1182 case BUILT_IN_SYNC_OR_AND_FETCH_1:
1183 case BUILT_IN_SYNC_OR_AND_FETCH_2:
1184 case BUILT_IN_SYNC_OR_AND_FETCH_4:
1185 case BUILT_IN_SYNC_OR_AND_FETCH_8:
1186 case BUILT_IN_SYNC_OR_AND_FETCH_16:
1187
1188 case BUILT_IN_SYNC_AND_AND_FETCH_1:
1189 case BUILT_IN_SYNC_AND_AND_FETCH_2:
1190 case BUILT_IN_SYNC_AND_AND_FETCH_4:
1191 case BUILT_IN_SYNC_AND_AND_FETCH_8:
1192 case BUILT_IN_SYNC_AND_AND_FETCH_16:
1193
1194 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
1195 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
1196 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
1197 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
1198 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
1199
1200 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
1201 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
1202 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
1203 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
1204
1205 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
1206 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
1207 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
1208 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
1209 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
1210
1211 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
1212 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
1213 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
1214 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
1215 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
1216
1217 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
1218 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
1219 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
1220 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
1221 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
1222
1223 case BUILT_IN_SYNC_LOCK_RELEASE_1:
1224 case BUILT_IN_SYNC_LOCK_RELEASE_2:
1225 case BUILT_IN_SYNC_LOCK_RELEASE_4:
1226 case BUILT_IN_SYNC_LOCK_RELEASE_8:
1227 case BUILT_IN_SYNC_LOCK_RELEASE_16:
1228
1229 case BUILT_IN_ATOMIC_EXCHANGE_1:
1230 case BUILT_IN_ATOMIC_EXCHANGE_2:
1231 case BUILT_IN_ATOMIC_EXCHANGE_4:
1232 case BUILT_IN_ATOMIC_EXCHANGE_8:
1233 case BUILT_IN_ATOMIC_EXCHANGE_16:
1234
1235 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
1236 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
1237 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
1238 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
1239 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
1240
1241 case BUILT_IN_ATOMIC_STORE_1:
1242 case BUILT_IN_ATOMIC_STORE_2:
1243 case BUILT_IN_ATOMIC_STORE_4:
1244 case BUILT_IN_ATOMIC_STORE_8:
1245 case BUILT_IN_ATOMIC_STORE_16:
1246
1247 case BUILT_IN_ATOMIC_ADD_FETCH_1:
1248 case BUILT_IN_ATOMIC_ADD_FETCH_2:
1249 case BUILT_IN_ATOMIC_ADD_FETCH_4:
1250 case BUILT_IN_ATOMIC_ADD_FETCH_8:
1251 case BUILT_IN_ATOMIC_ADD_FETCH_16:
1252
1253 case BUILT_IN_ATOMIC_SUB_FETCH_1:
1254 case BUILT_IN_ATOMIC_SUB_FETCH_2:
1255 case BUILT_IN_ATOMIC_SUB_FETCH_4:
1256 case BUILT_IN_ATOMIC_SUB_FETCH_8:
1257 case BUILT_IN_ATOMIC_SUB_FETCH_16:
1258
1259 case BUILT_IN_ATOMIC_AND_FETCH_1:
1260 case BUILT_IN_ATOMIC_AND_FETCH_2:
1261 case BUILT_IN_ATOMIC_AND_FETCH_4:
1262 case BUILT_IN_ATOMIC_AND_FETCH_8:
1263 case BUILT_IN_ATOMIC_AND_FETCH_16:
1264
1265 case BUILT_IN_ATOMIC_NAND_FETCH_1:
1266 case BUILT_IN_ATOMIC_NAND_FETCH_2:
1267 case BUILT_IN_ATOMIC_NAND_FETCH_4:
1268 case BUILT_IN_ATOMIC_NAND_FETCH_8:
1269 case BUILT_IN_ATOMIC_NAND_FETCH_16:
1270
1271 case BUILT_IN_ATOMIC_XOR_FETCH_1:
1272 case BUILT_IN_ATOMIC_XOR_FETCH_2:
1273 case BUILT_IN_ATOMIC_XOR_FETCH_4:
1274 case BUILT_IN_ATOMIC_XOR_FETCH_8:
1275 case BUILT_IN_ATOMIC_XOR_FETCH_16:
1276
1277 case BUILT_IN_ATOMIC_OR_FETCH_1:
1278 case BUILT_IN_ATOMIC_OR_FETCH_2:
1279 case BUILT_IN_ATOMIC_OR_FETCH_4:
1280 case BUILT_IN_ATOMIC_OR_FETCH_8:
1281 case BUILT_IN_ATOMIC_OR_FETCH_16:
1282
1283 case BUILT_IN_ATOMIC_FETCH_ADD_1:
1284 case BUILT_IN_ATOMIC_FETCH_ADD_2:
1285 case BUILT_IN_ATOMIC_FETCH_ADD_4:
1286 case BUILT_IN_ATOMIC_FETCH_ADD_8:
1287 case BUILT_IN_ATOMIC_FETCH_ADD_16:
1288
1289 case BUILT_IN_ATOMIC_FETCH_SUB_1:
1290 case BUILT_IN_ATOMIC_FETCH_SUB_2:
1291 case BUILT_IN_ATOMIC_FETCH_SUB_4:
1292 case BUILT_IN_ATOMIC_FETCH_SUB_8:
1293 case BUILT_IN_ATOMIC_FETCH_SUB_16:
1294
1295 case BUILT_IN_ATOMIC_FETCH_AND_1:
1296 case BUILT_IN_ATOMIC_FETCH_AND_2:
1297 case BUILT_IN_ATOMIC_FETCH_AND_4:
1298 case BUILT_IN_ATOMIC_FETCH_AND_8:
1299 case BUILT_IN_ATOMIC_FETCH_AND_16:
1300
1301 case BUILT_IN_ATOMIC_FETCH_NAND_1:
1302 case BUILT_IN_ATOMIC_FETCH_NAND_2:
1303 case BUILT_IN_ATOMIC_FETCH_NAND_4:
1304 case BUILT_IN_ATOMIC_FETCH_NAND_8:
1305 case BUILT_IN_ATOMIC_FETCH_NAND_16:
1306
1307 case BUILT_IN_ATOMIC_FETCH_XOR_1:
1308 case BUILT_IN_ATOMIC_FETCH_XOR_2:
1309 case BUILT_IN_ATOMIC_FETCH_XOR_4:
1310 case BUILT_IN_ATOMIC_FETCH_XOR_8:
1311 case BUILT_IN_ATOMIC_FETCH_XOR_16:
1312
1313 case BUILT_IN_ATOMIC_FETCH_OR_1:
1314 case BUILT_IN_ATOMIC_FETCH_OR_2:
1315 case BUILT_IN_ATOMIC_FETCH_OR_4:
1316 case BUILT_IN_ATOMIC_FETCH_OR_8:
1317 case BUILT_IN_ATOMIC_FETCH_OR_16:
1318 {
1319 dest = gimple_call_arg (call, 0);
1320 /* So DEST represents the address of a memory location.
1321 instrument_derefs wants the memory location, so let's
1322 dereference the address DEST before handing it to
1323 instrument_derefs. */
1324 if (TREE_CODE (dest) == ADDR_EXPR)
1325 dest = TREE_OPERAND (dest, 0);
1326 else if (TREE_CODE (dest) == SSA_NAME)
1327 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
1328 dest, build_int_cst (TREE_TYPE (dest), 0));
1329 else
1330 gcc_unreachable ();
1331
1332 instrument_derefs (iter, dest, loc, is_store);
1333 return false;
1334 }
1335
1336 default:
1337 /* The other memory access builtins are not instrumented in this
1338 function because they either don't have any length parameter,
1339 or their length parameter is just a limit. */
1340 break;
1341 }
1342
1343 if (len != NULL_TREE)
1344 {
1345 if (source0 != NULL_TREE)
1346 instrument_mem_region_access (source0, len, iter,
1347 loc, /*is_store=*/false);
1348 if (source1 != NULL_TREE)
1349 instrument_mem_region_access (source1, len, iter,
1350 loc, /*is_store=*/false);
1351 else if (dest != NULL_TREE)
1352 instrument_mem_region_access (dest, len, iter,
1353 loc, /*is_store=*/true);
1354
1355 *iter = gsi_for_stmt (call);
1356 return false;
1357 }
1358 return false;
1359 }
1360
1361 /* Instrument the assignment statement ITER if it is subject to
1362 instrumentation. */
1363
1364 static void
1365 instrument_assignment (gimple_stmt_iterator *iter)
1366 {
1367 gimple s = gsi_stmt (*iter);
1368
1369 gcc_assert (gimple_assign_single_p (s));
1370
1371 if (gimple_store_p (s))
1372 instrument_derefs (iter, gimple_assign_lhs (s),
1373 gimple_location (s), true);
1374 if (gimple_assign_load_p (s))
1375 instrument_derefs (iter, gimple_assign_rhs1 (s),
1376 gimple_location (s), false);
1377 }
1378
1379 /* Instrument the function call pointed to by the iterator ITER, if it
1380 is subject to instrumentation. At the moment, the only function
1381 calls that are instrumented are some built-in functions that access
1382 memory. Look at instrument_builtin_call to learn more.
1383
1384 Upon completion return TRUE iff *ITER was advanced to the statement
1385 following the one it was originally pointing to. */
1386
1387 static bool
1388 maybe_instrument_call (gimple_stmt_iterator *iter)
1389 {
1390 gimple stmt = gsi_stmt (*iter);
1391 bool is_builtin = is_gimple_builtin_call (stmt);
1392 if (is_builtin
1393 && instrument_builtin_call (iter))
1394 return true;
1395 if (gimple_call_noreturn_p (stmt))
1396 {
1397 if (is_builtin)
1398 {
1399 tree callee = gimple_call_fndecl (stmt);
1400 switch (DECL_FUNCTION_CODE (callee))
1401 {
1402 case BUILT_IN_UNREACHABLE:
1403 case BUILT_IN_TRAP:
1404 /* Don't instrument these. */
1405 return false;
1406 }
1407 }
1408 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
1409 gimple g = gimple_build_call (decl, 0);
1410 gimple_set_location (g, gimple_location (stmt));
1411 gsi_insert_before (iter, g, GSI_SAME_STMT);
1412 }
1413 return false;
1414 }
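
/* For illustration, a noreturn call such as abort () becomes

     __asan_handle_no_return ();
     abort ();

   giving the runtime a chance to clean up the shadow memory of the
   frames that are about to be abandoned.  */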
1415
1416 /* asan: this looks too complex. Can this be done simpler? */
1417 /* Transform
1418 1) Memory references.
1419 2) BUILTIN_ALLOCA calls.
1420 */
1421
1422 static void
1423 transform_statements (void)
1424 {
1425 basic_block bb;
1426 gimple_stmt_iterator i;
1427 int saved_last_basic_block = last_basic_block;
1428
1429 FOR_EACH_BB (bb)
1430 {
1431 if (bb->index >= saved_last_basic_block) continue;
1432 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
1433 {
1434 gimple s = gsi_stmt (i);
1435
1436 if (gimple_assign_single_p (s))
1437 instrument_assignment (&i);
1438 else if (is_gimple_call (s))
1439 {
1440 if (maybe_instrument_call (&i))
1441 /* Avoid gsi_next (&i), because maybe_instrument_call
1442 advanced the I iterator already. */
1443 continue;
1444 }
1445 gsi_next (&i);
1446 }
1447 }
1448 }
1449
1450 /* Build
1451 struct __asan_global
1452 {
1453 const void *__beg;
1454 uptr __size;
1455 uptr __size_with_redzone;
1456 const void *__name;
1457 uptr __has_dynamic_init;
1458 } type. */
1459
1460 static tree
1461 asan_global_struct (void)
1462 {
1463 static const char *field_names[5]
1464 = { "__beg", "__size", "__size_with_redzone",
1465 "__name", "__has_dynamic_init" };
1466 tree fields[5], ret;
1467 int i;
1468
1469 ret = make_node (RECORD_TYPE);
1470 for (i = 0; i < 5; i++)
1471 {
1472 fields[i]
1473 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
1474 get_identifier (field_names[i]),
1475 (i == 0 || i == 3) ? const_ptr_type_node
1476 : build_nonstandard_integer_type (POINTER_SIZE, 1));
1477 DECL_CONTEXT (fields[i]) = ret;
1478 if (i)
1479 DECL_CHAIN (fields[i - 1]) = fields[i];
1480 }
1481 TYPE_FIELDS (ret) = fields[0];
1482 TYPE_NAME (ret) = get_identifier ("__asan_global");
1483 layout_type (ret);
1484 return ret;
1485 }
1486
1487 /* Append description of a single global DECL into vector V.
1488 TYPE is __asan_global struct type as returned by asan_global_struct. */
1489
1490 static void
1491 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
1492 {
1493 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
1494 unsigned HOST_WIDE_INT size;
1495 tree str_cst, refdecl = decl;
1496 vec<constructor_elt, va_gc> *vinner = NULL;
1497
1498 if (!asan_pp_initialized)
1499 asan_pp_initialize ();
1500
1501 pp_clear_output_area (&asan_pp);
1502 if (DECL_NAME (decl))
1503 pp_base_tree_identifier (&asan_pp, DECL_NAME (decl));
1504 else
1505 pp_string (&asan_pp, "<unknown>");
1506 pp_space (&asan_pp);
1507 pp_left_paren (&asan_pp);
1508 pp_string (&asan_pp, main_input_filename);
1509 pp_right_paren (&asan_pp);
1510 str_cst = asan_pp_string ();
1511
1512 if (asan_needs_local_alias (decl))
1513 {
1514 char buf[20];
1515 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
1516 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
1517 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
1518 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
1519 TREE_READONLY (refdecl) = TREE_READONLY (decl);
1520 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
1521 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
1522 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
1523 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
1524 TREE_STATIC (refdecl) = 1;
1525 TREE_PUBLIC (refdecl) = 0;
1526 TREE_USED (refdecl) = 1;
1527 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
1528 }
1529
1530 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
1531 fold_convert (const_ptr_type_node,
1532 build_fold_addr_expr (refdecl)));
1533 size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
1534 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
1535 size += asan_red_zone_size (size);
1536 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
1537 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
1538 fold_convert (const_ptr_type_node, str_cst));
1539 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, 0));
1540 init = build_constructor (type, vinner);
1541 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
1542 }
1543
1544 /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
1545 void
1546 initialize_sanitizer_builtins (void)
1547 {
1548 tree decl;
1549
1550 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
1551 return;
1552
1553 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
1554 tree BT_FN_VOID_PTR
1555 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1556 tree BT_FN_VOID_PTR_PTRMODE
1557 = build_function_type_list (void_type_node, ptr_type_node,
1558 build_nonstandard_integer_type (POINTER_SIZE,
1559 1), NULL_TREE);
1560 tree BT_FN_VOID_INT
1561 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
1562 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
1563 tree BT_FN_IX_CONST_VPTR_INT[5];
1564 tree BT_FN_IX_VPTR_IX_INT[5];
1565 tree BT_FN_VOID_VPTR_IX_INT[5];
1566 tree vptr
1567 = build_pointer_type (build_qualified_type (void_type_node,
1568 TYPE_QUAL_VOLATILE));
1569 tree cvptr
1570 = build_pointer_type (build_qualified_type (void_type_node,
1571 TYPE_QUAL_VOLATILE
1572 |TYPE_QUAL_CONST));
1573 tree boolt
1574 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
1575 int i;
1576 for (i = 0; i < 5; i++)
1577 {
1578 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
1579 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
1580 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
1581 integer_type_node, integer_type_node,
1582 NULL_TREE);
1583 BT_FN_IX_CONST_VPTR_INT[i]
1584 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
1585 BT_FN_IX_VPTR_IX_INT[i]
1586 = build_function_type_list (ix, vptr, ix, integer_type_node,
1587 NULL_TREE);
1588 BT_FN_VOID_VPTR_IX_INT[i]
1589 = build_function_type_list (void_type_node, vptr, ix,
1590 integer_type_node, NULL_TREE);
1591 }
1592 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
1593 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
1594 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
1595 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
1596 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
1597 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
1598 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
1599 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
1600 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
1601 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
1602 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
1603 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
1604 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
1605 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
1606 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
1607 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
1608 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
1609 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
1610 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
1611 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
1612 #undef ATTR_NOTHROW_LEAF_LIST
1613 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
1614 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
1615 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
1616 #undef DEF_SANITIZER_BUILTIN
1617 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
1618 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
1619 BUILT_IN_NORMAL, NAME, NULL_TREE); \
1620 set_call_expr_flags (decl, ATTRS); \
1621 set_builtin_decl (ENUM, decl, true);
1622
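/* As an example of the expansion, an entry of this shape in
   sanitizer.def (assuming one exists there):

     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_INIT, "__asan_init",
                            BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST)

   expands to

     decl = add_builtin_function ("__builtin___asan_init", BT_FN_VOID,
                                  BUILT_IN_ASAN_INIT, BUILT_IN_NORMAL,
                                  "__asan_init", NULL_TREE);
     set_call_expr_flags (decl, ECF_NOTHROW | ECF_LEAF);
     set_builtin_decl (BUILT_IN_ASAN_INIT, decl, true);  */
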
1623 #include "sanitizer.def"
1624
1625 #undef DEF_SANITIZER_BUILTIN
1626 }
1627
1628 /* Called via htab_traverse. Count number of emitted
1629 STRING_CSTs in the constant hash table. */
1630
1631 static int
1632 count_string_csts (void **slot, void *data)
1633 {
1634 struct constant_descriptor_tree *desc
1635 = (struct constant_descriptor_tree *) *slot;
1636 if (TREE_CODE (desc->value) == STRING_CST
1637 && TREE_ASM_WRITTEN (desc->value)
1638 && asan_protect_global (desc->value))
1639 ++*((unsigned HOST_WIDE_INT *) data);
1640 return 1;
1641 }
1642
1643 /* Helper structure to pass two parameters to
1644 add_string_csts. */
1645
1646 struct asan_add_string_csts_data
1647 {
1648 tree type;
1649 vec<constructor_elt, va_gc> *v;
1650 };
1651
1652 /* Called via htab_traverse. Call asan_add_global
1653 on emitted STRING_CSTs from the constant hash table. */
1654
1655 static int
1656 add_string_csts (void **slot, void *data)
1657 {
1658 struct constant_descriptor_tree *desc
1659 = (struct constant_descriptor_tree *) *slot;
1660 if (TREE_CODE (desc->value) == STRING_CST
1661 && TREE_ASM_WRITTEN (desc->value)
1662 && asan_protect_global (desc->value))
1663 {
1664 struct asan_add_string_csts_data *aascd
1665 = (struct asan_add_string_csts_data *) data;
1666 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
1667 aascd->type, aascd->v);
1668 }
1669 return 1;
1670 }
1671
1672 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
1673 invoke ggc_collect. */
1674 static GTY(()) tree asan_ctor_statements;
1675
1676 /* Module-level instrumentation.
1677 - Insert __asan_init() into the list of CTORs.
1678 - TODO: insert redzones around globals.
1679 */
1680
1681 void
1682 asan_finish_file (void)
1683 {
1684 struct varpool_node *vnode;
1685 unsigned HOST_WIDE_INT gcount = 0;
1686
1687 if (shadow_ptr_types[0] == NULL_TREE)
1688 asan_init_shadow_ptr_types ();
1689 /* Avoid instrumenting code in the asan ctors/dtors.
1690 We don't need to insert padding after the description strings,
1691 nor after the .LASAN* array. */
1692 flag_asan = 0;
1693
1694 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
1695 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
1696 FOR_EACH_DEFINED_VARIABLE (vnode)
1697 if (asan_protect_global (vnode->symbol.decl))
1698 ++gcount;
1699 htab_t const_desc_htab = constant_pool_htab ();
1700 htab_traverse (const_desc_htab, count_string_csts, &gcount);
1701 if (gcount)
1702 {
1703 tree type = asan_global_struct (), var, ctor;
1704 tree uptr = build_nonstandard_integer_type (POINTER_SIZE, 1);
1705 tree dtor_statements = NULL_TREE;
1706 vec<constructor_elt, va_gc> *v;
1707 char buf[20];
1708
1709 type = build_array_type_nelts (type, gcount);
1710 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
1711 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
1712 type);
1713 TREE_STATIC (var) = 1;
1714 TREE_PUBLIC (var) = 0;
1715 DECL_ARTIFICIAL (var) = 1;
1716 DECL_IGNORED_P (var) = 1;
1717 vec_alloc (v, gcount);
1718 FOR_EACH_DEFINED_VARIABLE (vnode)
1719 if (asan_protect_global (vnode->symbol.decl))
1720 asan_add_global (vnode->symbol.decl, TREE_TYPE (type), v);
1721 struct asan_add_string_csts_data aascd;
1722 aascd.type = TREE_TYPE (type);
1723 aascd.v = v;
1724 htab_traverse (const_desc_htab, add_string_csts, &aascd);
1725 ctor = build_constructor (type, v);
1726 TREE_CONSTANT (ctor) = 1;
1727 TREE_STATIC (ctor) = 1;
1728 DECL_INITIAL (var) = ctor;
1729 varpool_assemble_decl (varpool_node_for_decl (var));
1730
1731 fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
1732 append_to_statement_list (build_call_expr (fn, 2,
1733 build_fold_addr_expr (var),
1734 build_int_cst (uptr, gcount)),
1735 &asan_ctor_statements);
1736
1737 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
1738 append_to_statement_list (build_call_expr (fn, 2,
1739 build_fold_addr_expr (var),
1740 build_int_cst (uptr, gcount)),
1741 &dtor_statements);
1742 cgraph_build_static_cdtor ('D', dtor_statements,
1743 MAX_RESERVED_INIT_PRIORITY - 1);
1744 }
1745 cgraph_build_static_cdtor ('I', asan_ctor_statements,
1746 MAX_RESERVED_INIT_PRIORITY - 1);
1747 flag_asan = 1;
1748 }
1749
1750 /* Instrument the current function. */
1751
1752 static unsigned int
1753 asan_instrument (void)
1754 {
1755 if (shadow_ptr_types[0] == NULL_TREE)
1756 asan_init_shadow_ptr_types ();
1757 transform_statements ();
1758 return 0;
1759 }
1760
1761 static bool
1762 gate_asan (void)
1763 {
1764 return flag_asan != 0
1765 && !lookup_attribute ("no_address_safety_analysis",
1766 DECL_ATTRIBUTES (current_function_decl));
1767 }
1768
1769 struct gimple_opt_pass pass_asan =
1770 {
1771 {
1772 GIMPLE_PASS,
1773 "asan", /* name */
1774 OPTGROUP_NONE, /* optinfo_flags */
1775 gate_asan, /* gate */
1776 asan_instrument, /* execute */
1777 NULL, /* sub */
1778 NULL, /* next */
1779 0, /* static_pass_number */
1780 TV_NONE, /* tv_id */
1781 PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
1782 0, /* properties_provided */
1783 0, /* properties_destroyed */
1784 0, /* todo_flags_start */
1785 TODO_verify_flow | TODO_verify_stmts
1786 | TODO_update_ssa /* todo_flags_finish */
1787 }
1788 };
1789
1790 static bool
1791 gate_asan_O0 (void)
1792 {
1793 return !optimize && gate_asan ();
1794 }
1795
1796 struct gimple_opt_pass pass_asan_O0 =
1797 {
1798 {
1799 GIMPLE_PASS,
1800 "asan0", /* name */
1801 OPTGROUP_NONE, /* optinfo_flags */
1802 gate_asan_O0, /* gate */
1803 asan_instrument, /* execute */
1804 NULL, /* sub */
1805 NULL, /* next */
1806 0, /* static_pass_number */
1807 TV_NONE, /* tv_id */
1808 PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
1809 0, /* properties_provided */
1810 0, /* properties_destroyed */
1811 0, /* todo_flags_start */
1812 TODO_verify_flow | TODO_verify_stmts
1813 | TODO_update_ssa /* todo_flags_finish */
1814 }
1815 };
1816
1817 #include "gt-asan.h"