/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012, 2013 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "asan.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "expr.h"
#include "optabs.h"
#include "output.h"
#include "tm_p.h"
#include "langhooks.h"

/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   an instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every
   memory access.  For an 8- or 16-byte load accessing address X:
     ShadowAddr = (X >> 3) + Offset
     ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
     if (ShadowValue)
       __asan_report_load8(X);
   For a load of N bytes (N=1, 2 or 4) from address X:
     ShadowAddr = (X >> 3) + Offset
     ShadowValue = *(char*)ShadowAddr;
     if (ShadowValue)
       if ((X & 7) + N - 1 >= ShadowValue)
         __asan_report_loadN(X);
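
   As a concrete illustration (assuming the 0x7fff8000 shadow offset
   that the x86_64 run-time uses; the address is purely illustrative),
   a 4-byte load from X = 0x601040 consults the shadow byte at
   (0x601040 >> 3) + 0x7fff8000 = 0x800b8208.  If that byte is 0, the
   whole 8-byte granule is addressable and no report is issued.  If it
   is, say, 4 (only the first 4 bytes of the granule addressable), the
   check computes (0x601040 & 7) + 4 - 1 >= 4, i.e. 3 >= 4, which is
   false, so the load is still fine; the same 4-byte load from
   X = 0x601042 gives 5 >= 4 and is reported.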
   Stores are instrumented similarly, but using __asan_report_storeN functions.
   A call to __asan_init() is inserted into the list of module CTORs.

   The run-time library redefines malloc (so that redzones are inserted around
   the allocated memory) and free (so that reuse of freed memory is delayed),
   and provides the __asan_report* and __asan_init functions.

   Read more:
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm

   The current implementation supports detection of out-of-bounds and
   use-after-free in the heap, on the stack and for global variables.

   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:

     int
     foo ()
     {
       char a[23] = {0};
       int b[2] = {0};

       a[5] = 1;
       b[1] = 2;

       return a[5] + b[1];
     }

   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

   Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

   Slot 2/ [8 bytes of red zone, padding the space of 'a' so that the
           next slot is 32-byte aligned; this one is called a Partial
           Redzone; this 32-byte alignment is an asan constraint]

   Slot 3/ [24 bytes for variable 'a']

   Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

   Slot 6/ [8 bytes for variable 'b']

   Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
           'LEFT RedZone']

   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as follows:

   1/ The first 8 bytes contain a magic asan number that is always
   0x41B58AB3.

   2/ The following 8 bytes contain a pointer to a string (to be
   parsed at runtime by the asan run-time library), whose format is
   the following:

    "<function-name> <space> <num-of-variables-on-the-stack>
    (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
    <length-of-var-in-bytes> <space> <length-of-var-name> <space>
    <var-name> ){n} "

   where '(...){n}' means the content inside the parentheses occurs 'n'
   times, with 'n' being the number of variables on the stack.
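
   For the 'foo' example above, and with purely illustrative offsets,
   the string could look like:

    "foo 2 32 24 1 a 96 8 1 b "

   i.e. function 'foo' with 2 stack variables: 'a' (name length 1),
   24 bytes at offset 32, and 'b' (name length 1), 8 bytes at
   offset 96.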

   3/ The following 16 bytes of the red zone have no particular
   format.

   The shadow memory for that stack layout is going to look like this:

     - content of shadow memory 4 bytes for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of shadow memory 4 bytes for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32-byte aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') are addressable.

     - content of shadow memory 4 bytes for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of shadow memory 4 bytes for slots 3 and 2:
       0xF4000000.  This represents the concatenation of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that the 24 bytes of variable 'a' are addressable.

     - content of shadow memory 4 bytes for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.

   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark
   as non-accessible) the regions of the red zones and mark the
   regions of stack variables as accessible, and emits some epilogue
   code to un-poison (mark as accessible) the regions of red zones
   right before the function exits.

   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to
   populate the relevant shadow memory regions at load time.

   So the global variables are laid out so as to insert a red zone
   between them.  The size of the red zones is chosen so that each
   variable starts on a 32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:

     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       // size is 32-byte aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // This is always set to 0 for now.
       uptr __has_dynamic_init;
     }
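
   For instance (with illustrative names), for a 4-byte global
   'int g;' defined in t.c, asan_add_global below builds roughly:

     {&g, 4, 64, "g (t.c)", 0}

   where 64 is the 4 bytes of data plus 60 bytes of red zone, rounding
   the total up to the next 32-byte boundary while keeping the red
   zone at least 32 bytes long.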

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */

alias_set_type asan_shadow_set = -1;

/* Pointer types to 1-byte resp. 2-byte integers in shadow memory.  A
   separate alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[2];

/* Initialize shadow_ptr_types array.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
  shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
  shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
  shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
  initialize_sanitizer_builtins ();
}

/* Asan pretty-printer, used for building the description STRING_CSTs.  */
static pretty_printer asan_pp;
static bool asan_pp_initialized;

/* Initialize asan_pp.  */

static void
asan_pp_initialize (void)
{
  pp_construct (&asan_pp, /* prefix */ NULL, /* line-width */ 0);
  asan_pp_initialized = true;
}

/* Create ADDR_EXPR of STRING_CST with the asan_pp text.  */

static tree
asan_pp_string (void)
{
  const char *buf = pp_base_formatted_text (&asan_pp);
  size_t len = strlen (buf);
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
                        build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
}

/* Return a CONST_INT representing 4 consecutive shadow memory bytes.  */
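/* For instance, on a little-endian target the shadow bytes
   {0, 0, 0xF4, 0xF4} yield the SImode constant 0xF4F40000.  */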

static rtx
asan_shadow_cst (unsigned char shadow_bytes[4])
{
  int i;
  unsigned HOST_WIDE_INT val = 0;
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
  for (i = 0; i < 4; i++)
    val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
           << (BITS_PER_UNIT * i);
  return GEN_INT (trunc_int_for_mode (val, SImode));
}

/* Insert code to protect stack vars.  The prologue sequence should be emitted
   directly, the epilogue sequence is returned.  BASE is the register holding
   the stack base, relative to which the offsets in the OFFSETS array are.
   The OFFSETS array contains pairs of offsets in reverse order, always the
   end offset of some gap that needs protection followed by its starting
   offset, and DECLS is an array of representative decls for each var
   partition.  LENGTH is the length of the OFFSETS array, the DECLS array is
   LENGTH / 2 - 1 elements long (OFFSETS includes the gap before the first
   variable as well as the gaps after each stack variable).  */
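/* As a sketch of the interface: for a function with two protected
   variable partitions there are three gaps to protect (the LEFT,
   MIDDLE and RIGHT red zones of the layout described above), so
   LENGTH is 6, OFFSETS holds an (end, start) pair for each gap, and
   DECLS holds the two representative decls.  */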

rtx
asan_emit_stack_protection (rtx base, HOST_WIDE_INT *offsets, tree *decls,
                            int length)
{
  rtx shadow_base, shadow_mem, ret, mem;
  unsigned char shadow_bytes[4];
  HOST_WIDE_INT base_offset = offsets[length - 1], offset, prev_offset;
  HOST_WIDE_INT last_offset, last_size;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  /* First of all, prepare the description string.  */
  if (!asan_pp_initialized)
    asan_pp_initialize ();

  pp_clear_output_area (&asan_pp);
  if (DECL_NAME (current_function_decl))
    pp_base_tree_identifier (&asan_pp, DECL_NAME (current_function_decl));
  else
    pp_string (&asan_pp, "<unknown>");
  pp_space (&asan_pp);
  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
        {
          pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
          pp_space (&asan_pp);
          pp_base_tree_identifier (&asan_pp, DECL_NAME (decl));
        }
      else
        pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string ();

  /* Emit the prologue sequence.  */
  base = expand_binop (Pmode, add_optab, base, GEN_INT (base_offset),
                       NULL_RTX, 1, OPTAB_DIRECT);
  mem = gen_rtx_MEM (ptr_mode, base);
  emit_move_insn (mem, GEN_INT (ASAN_STACK_FRAME_MAGIC));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  shadow_base = expand_binop (Pmode, lshr_optab, base,
                              GEN_INT (ASAN_SHADOW_SHIFT),
                              NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base = expand_binop (Pmode, add_optab, shadow_base,
                              GEN_INT (targetm.asan_shadow_offset ()),
                              NULL_RTX, 1, OPTAB_DIRECT);
  gcc_assert (asan_shadow_set != -1
              && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  prev_offset = base_offset;
  for (l = length; l; l -= 2)
    {
      if (l == 2)
        cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
        {
          int i;
          HOST_WIDE_INT aoff
            = base_offset + ((offset - base_offset)
                             & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
          shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                       (aoff - prev_offset)
                                       >> ASAN_SHADOW_SHIFT);
          prev_offset = aoff;
          for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
            if (aoff < offset)
              {
                if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
                  shadow_bytes[i] = 0;
                else
                  shadow_bytes[i] = offset - aoff;
              }
            else
              shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
          emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
          offset = aoff;
        }
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
        {
          shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                       (offset - prev_offset)
                                       >> ASAN_SHADOW_SHIFT);
          prev_offset = offset;
          memset (shadow_bytes, cur_shadow_byte, 4);
          emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
          offset += ASAN_RED_ZONE_SIZE;
        }
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();

  /* Construct the epilogue sequence.  */
  start_sequence ();

  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  prev_offset = base_offset;
  last_offset = base_offset;
  last_size = 0;
  for (l = length; l; l -= 2)
    {
      offset = base_offset + ((offsets[l - 1] - base_offset)
                              & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
      if (last_offset + last_size != offset)
        {
          shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                       (last_offset - prev_offset)
                                       >> ASAN_SHADOW_SHIFT);
          prev_offset = last_offset;
          clear_storage (shadow_mem, GEN_INT (last_size >> ASAN_SHADOW_SHIFT),
                         BLOCK_OP_NORMAL);
          last_offset = offset;
          last_size = 0;
        }
      last_size += base_offset + ((offsets[l - 2] - base_offset)
                                  & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
                   - offset;
    }
  if (last_size)
    {
      shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                   (last_offset - prev_offset)
                                   >> ASAN_SHADOW_SHIFT);
      clear_storage (shadow_mem, GEN_INT (last_size >> ASAN_SHADOW_SHIFT),
                     BLOCK_OP_NORMAL);
    }

  do_pending_stack_adjust ();

  ret = get_insns ();
  end_sequence ();
  return ret;
}

/* Return true if DECL, a global var, might be overridden and
   therefore needs a local alias.  */

static bool
asan_needs_local_alias (tree decl)
{
  return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
}

/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */

bool
asan_protect_global (tree decl)
{
  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
         by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
          && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
          && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
        return false;
      return true;
    }
  if (TREE_CODE (decl) != VAR_DECL
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      || !DECL_RTL_SET_P (decl)
      /* Comdat vars pose an ABI problem, we can't know if
         the var that is selected by the linker will have
         padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect vars placed in a user section; vars placed
         into a user section from multiple TUs are often assumed
         to form an array of such vars, and putting padding in there
         breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL_TREE
          && !DECL_HAS_IMPLICIT_SECTION_NAME_P (decl))
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE)
    return false;

  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
    return false;
  symbol = XEXP (rtl, 0);

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    return false;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

#ifndef ASM_OUTPUT_DEF
  if (asan_needs_local_alias (decl))
    return false;
#endif

  return true;
}

/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16}.
   IS_STORE is either 1 (for a store) or 0 (for a load).
   SIZE_IN_BYTES is one of 1, 2, 4, 8, 16.  */
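/* For example, report_error_func (1, 8) returns the decl of
   __asan_report_store8.  */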

static tree
report_error_func (bool is_store, int size_in_bytes)
{
  static enum built_in_function report[2][5]
    = { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
          BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
          BUILT_IN_ASAN_REPORT_LOAD16 },
        { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
          BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
          BUILT_IN_ASAN_REPORT_STORE16 } };
  return builtin_decl_implicit (report[is_store][exact_log2 (size_in_bytes)]);
}

#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 2000 - 1)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)

/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement of the
   basic block *FALLTHROUGH_BLOCK.  That statement is the same as what
   ITER was pointing to prior to calling this function, if BEFORE_P is
   true; otherwise, it is its following statement.  */
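/* Schematically, starting from the block that contains *ITER:

        cond_bb  (original statements up to the split point; the
          |  \    caller appends the condition at its end)
          |   then_bb  (new and initially empty)
          |  /
     fallthru_bb  (statements from *ITER onward)  */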

static gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
                          bool before_p,
                          bool then_more_likely_p,
                          basic_block *then_block,
                          basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
      ? PROB_VERY_UNLIKELY
      : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
}

/* Insert an if condition followed by a 'then block' right before the
   statement pointed to by ITER.  The fallthrough block -- which is the
   else block of the condition as well as the destination of the
   outgoing edge of the 'then block' -- starts with the statement
   pointed to by ITER.

   COND is the condition of the if.

   If THEN_MORE_LIKELY_P is true, the probability of the edge to the
   'then block' is higher than the probability of the edge to the
   fallthrough block.

   Upon completion of the function, *THEN_BB is set to the newly
   inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
   fallthrough block.

   *ITER is adjusted to still point to the same statement it was
   pointing to initially.  */

static void
insert_if_then_before_iter (gimple cond,
                            gimple_stmt_iterator *iter,
                            bool then_more_likely_p,
                            basic_block *then_bb,
                            basic_block *fallthrough_bb)
{
  gimple_stmt_iterator cond_insert_point
    = create_cond_insert_point (iter,
                                /*before_p=*/true,
                                then_more_likely_p,
                                then_bb,
                                fallthrough_bb);
  gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}

/* Instrument the memory access instruction BASE.  Insert new
   statements before or after ITER.

   Note that the memory access represented by BASE can be either an
   SSA_NAME, or a non-SSA expression.  LOCATION is the source code
   location.  IS_STORE is TRUE for a store, FALSE for a load.
   BEFORE_P is TRUE for inserting the instrumentation code before
   ITER, FALSE for inserting it after ITER.  SIZE_IN_BYTES is one of
   1, 2, 4, 8, 16.

   If BEFORE_P is TRUE, *ITER is arranged to still point to the
   statement it was pointing to prior to calling this function,
   otherwise, it points to the statement logically following it.  */

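/* A rough sketch of what this builds for an 8-byte load from P
   (the uintptr cast shown as 'unsigned long' for illustration):

     _1 = (unsigned long) P;
     _2 = _1 >> 3;
     _3 = _2 + <shadow offset>;
     _4 = (signed char *) _3;
     _5 = *_4;
     if (_5 != 0)
       __asan_report_load8 (_1);

   with the smaller accesses additionally comparing
   (_1 & 7) + (size - 1) against the shadow byte, as spelled out in
   the code below.  */
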
static void
build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
                  bool before_p, bool is_store, int size_in_bytes)
{
  gimple_stmt_iterator gsi;
  basic_block then_bb, else_bb;
  tree t, base_addr, shadow;
  gimple g;
  tree shadow_ptr_type = shadow_ptr_types[size_in_bytes == 16 ? 1 : 0];
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  tree uintptr_type
    = build_nonstandard_integer_type (TYPE_PRECISION (TREE_TYPE (base)), 1);
  tree base_ssa = base;

  /* Get an iterator on the point where we can add the condition
     statement for the instrumentation.  */
  gsi = create_cond_insert_point (iter, before_p,
                                  /*then_more_likely_p=*/false,
                                  &then_bb,
                                  &else_bb);

  base = unshare_expr (base);

  /* BASE can already be an SSA_NAME; in that case, do not create a
     new SSA_NAME for it.  */
  if (TREE_CODE (base) != SSA_NAME)
    {
      g = gimple_build_assign_with_ops (TREE_CODE (base),
                                        make_ssa_name (TREE_TYPE (base), NULL),
                                        base, NULL_TREE);
      gimple_set_location (g, location);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      base_ssa = gimple_assign_lhs (g);
    }

  g = gimple_build_assign_with_ops (NOP_EXPR,
                                    make_ssa_name (uintptr_type, NULL),
                                    base_ssa, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
  base_addr = gimple_assign_lhs (g);

  /* Build
     (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset ().  */

  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign_with_ops (RSHIFT_EXPR,
                                    make_ssa_name (uintptr_type, NULL),
                                    base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  t = build_int_cst (uintptr_type, targetm.asan_shadow_offset ());
  g = gimple_build_assign_with_ops (PLUS_EXPR,
                                    make_ssa_name (uintptr_type, NULL),
                                    gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  g = gimple_build_assign_with_ops (NOP_EXPR,
                                    make_ssa_name (shadow_ptr_type, NULL),
                                    gimple_assign_lhs (g), NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
              build_int_cst (shadow_ptr_type, 0));
  g = gimple_build_assign_with_ops (MEM_REF,
                                    make_ssa_name (shadow_type, NULL),
                                    t, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
  shadow = gimple_assign_lhs (g);

  if (size_in_bytes < 8)
    {
      /* Slow path for 1-, 2- and 4-byte accesses.
         Test (shadow != 0)
              & ((base_addr & 7) + (size_in_bytes - 1) >= shadow).  */
      g = gimple_build_assign_with_ops (NE_EXPR,
                                        make_ssa_name (boolean_type_node,
                                                       NULL),
                                        shadow,
                                        build_int_cst (shadow_type, 0));
      gimple_set_location (g, location);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      t = gimple_assign_lhs (g);

      g = gimple_build_assign_with_ops (BIT_AND_EXPR,
                                        make_ssa_name (uintptr_type,
                                                       NULL),
                                        base_addr,
                                        build_int_cst (uintptr_type, 7));
      gimple_set_location (g, location);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);

      g = gimple_build_assign_with_ops (NOP_EXPR,
                                        make_ssa_name (shadow_type,
                                                       NULL),
                                        gimple_assign_lhs (g), NULL_TREE);
      gimple_set_location (g, location);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);

      if (size_in_bytes > 1)
        {
          g = gimple_build_assign_with_ops (PLUS_EXPR,
                                            make_ssa_name (shadow_type,
                                                           NULL),
                                            gimple_assign_lhs (g),
                                            build_int_cst (shadow_type,
                                                           size_in_bytes - 1));
          gimple_set_location (g, location);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);
        }

      g = gimple_build_assign_with_ops (GE_EXPR,
                                        make_ssa_name (boolean_type_node,
                                                       NULL),
                                        gimple_assign_lhs (g),
                                        shadow);
      gimple_set_location (g, location);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);

      g = gimple_build_assign_with_ops (BIT_AND_EXPR,
                                        make_ssa_name (boolean_type_node,
                                                       NULL),
                                        t, gimple_assign_lhs (g));
      gimple_set_location (g, location);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      t = gimple_assign_lhs (g);
    }
  else
    t = shadow;

  g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
                         NULL_TREE, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Generate a call to the run-time library (e.g. __asan_report_load8).  */
  gsi = gsi_start_bb (then_bb);
  g = gimple_build_call (report_error_func (is_store, size_in_bytes),
                         1, base_addr);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  *iter = gsi_start_bb (else_bb);
}

/* If T represents a memory access, add instrumentation code before ITER.
   LOCATION is the source code location.
   IS_STORE is either TRUE (for a store) or FALSE (for a load).  */

static void
instrument_derefs (gimple_stmt_iterator *iter, tree t,
                   location_t location, bool is_store)
{
  tree type, base;
  HOST_WIDE_INT size_in_bytes;

  type = TREE_TYPE (t);
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case INDIRECT_REF:
    case MEM_REF:
      break;
    default:
      return;
    }

  size_in_bytes = int_size_in_bytes (type);
  if ((size_in_bytes & (size_in_bytes - 1)) != 0
      || (unsigned HOST_WIDE_INT) size_in_bytes - 1 >= 16)
    return;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  get_inner_reference (t, &bitsize, &bitpos, &offset,
                       &mode, &unsignedp, &volatilep, false);
  if (bitpos % (size_in_bytes * BITS_PER_UNIT)
      || bitsize != size_in_bytes * BITS_PER_UNIT)
    {
      if (TREE_CODE (t) == COMPONENT_REF
          && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
        {
          tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
          instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
                                           TREE_OPERAND (t, 0), repr,
                                           NULL_TREE), location, is_store);
        }
      return;
    }

  base = build_fold_addr_expr (t);
  build_check_stmt (location, base, iter, /*before_p=*/true,
                    is_store, size_in_bytes);
}

/* Instrument an access to a contiguous memory region that starts at
   the address pointed to by BASE, over a length of LEN (expressed in
   the sizeof (*BASE) bytes).  ITER points to the instruction before
   which the instrumentation instructions must be inserted.  LOCATION
   is the source location that the instrumentation instructions must
   have.  If IS_STORE is true, then the memory access is a store;
   otherwise, it's a load.  */

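/* For example, for memset (p, 0, n) this instruments the shadow of
   p[0] and of p[n - 1]; when N is not a compile-time constant, the
   checks are guarded by an 'if (n != 0)' condition.  */
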
static void
instrument_mem_region_access (tree base, tree len,
                              gimple_stmt_iterator *iter,
                              location_t location, bool is_store)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len))
      || integer_zerop (len))
    return;

  gimple_stmt_iterator gsi = *iter;

  basic_block fallthrough_bb = NULL, then_bb = NULL;
  if (!is_gimple_constant (len))
    {
      /* So, the length of the memory area to asan-protect is
         non-constant.  Let's guard the generated instrumentation code
         like:

         if (len != 0)
           {
             // asan instrumentation code goes here.
           }
         // fallthrough instructions, starting with *ITER.  */

      gimple g = gimple_build_cond (NE_EXPR,
                                    len,
                                    build_int_cst (TREE_TYPE (len), 0),
                                    NULL_TREE, NULL_TREE);
      gimple_set_location (g, location);
      insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
                                  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
         pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0)' condition is where
         we'll generate the asan instrumentation code now.  */
      gsi = gsi_start_bb (then_bb);
    }

  /* Instrument the beginning of the memory region to be accessed,
     and arrange for the rest of the instrumentation code to be
     inserted in the then block *after* the current gsi.  */
  build_check_stmt (location, base, &gsi, /*before_p=*/true, is_store, 1);

  if (then_bb)
    /* We are in the case where the length of the region is not
       constant; so instrumentation code is being generated in the
       'then block' of the 'if (len != 0)' condition.  Let's arrange
       for the subsequent instrumentation statements to go in the
       'then block'.  */
    gsi = gsi_last_bb (then_bb);
  else
    *iter = gsi;

  /* We want to instrument the access at the end of the memory region,
     which is at (base + len - 1).  */

  /* offset = len - 1;  */
  len = unshare_expr (len);
  tree offset;
  gimple_seq seq = NULL;
  if (TREE_CODE (len) == INTEGER_CST)
    offset = fold_build2 (MINUS_EXPR, size_type_node,
                          fold_convert (size_type_node, len),
                          build_int_cst (size_type_node, 1));
  else
    {
      gimple g;
      tree t;

      if (TREE_CODE (len) != SSA_NAME)
        {
          t = make_ssa_name (TREE_TYPE (len), NULL);
          g = gimple_build_assign_with_ops (TREE_CODE (len), t, len, NULL);
          gimple_set_location (g, location);
          gimple_seq_add_stmt_without_update (&seq, g);
          len = t;
        }
      if (!useless_type_conversion_p (size_type_node, TREE_TYPE (len)))
        {
          t = make_ssa_name (size_type_node, NULL);
          g = gimple_build_assign_with_ops (NOP_EXPR, t, len, NULL);
          gimple_set_location (g, location);
          gimple_seq_add_stmt_without_update (&seq, g);
          len = t;
        }

      t = make_ssa_name (size_type_node, NULL);
      g = gimple_build_assign_with_ops (MINUS_EXPR, t, len,
                                        build_int_cst (size_type_node, 1));
      gimple_set_location (g, location);
      gimple_seq_add_stmt_without_update (&seq, g);
      offset = gimple_assign_lhs (g);
    }

  /* _1 = base;  */
  base = unshare_expr (base);
  gimple region_end
    = gimple_build_assign_with_ops (TREE_CODE (base),
                                    make_ssa_name (TREE_TYPE (base), NULL),
                                    base, NULL);
  gimple_set_location (region_end, location);
  gimple_seq_add_stmt_without_update (&seq, region_end);
  gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
  gsi_prev (&gsi);

  /* _2 = _1 + offset;  */
  region_end
    = gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
                                    make_ssa_name (TREE_TYPE (base), NULL),
                                    gimple_assign_lhs (region_end),
                                    offset);
  gimple_set_location (region_end, location);
  gsi_insert_after (&gsi, region_end, GSI_NEW_STMT);

  /* Instrument the access at _2.  */
  build_check_stmt (location, gimple_assign_lhs (region_end),
                    &gsi, /*before_p=*/false, is_store, 1);
}

/* Instrument the call (to the builtin strlen function) pointed to by
   ITER.

   This function instruments the access to the first byte of the
   argument, right before the call.  After the call it instruments the
   access to the last byte of the argument; it uses the result of the
   call to deduce the offset of that last byte.

   Upon completion, iff the call has actually been instrumented, this
   function returns TRUE and *ITER points to the statement logically
   following the built-in strlen function call *ITER was initially
   pointing to.  Otherwise, the function returns FALSE and *ITER
   remains unchanged.  */

static bool
instrument_strlen_call (gimple_stmt_iterator *iter)
{
  gimple call = gsi_stmt (*iter);
  gcc_assert (is_gimple_call (call));

  tree callee = gimple_call_fndecl (call);
  gcc_assert (is_builtin_fn (callee)
              && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN);

  tree len = gimple_call_lhs (call);
  if (len == NULL)
    /* Some passes might clear the return value of the strlen call;
       bail out in that case.  Return FALSE as we are not advancing
       *ITER.  */
    return false;
  gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (len)));

  location_t loc = gimple_location (call);
  tree str_arg = gimple_call_arg (call, 0);

  /* Instrument the access to the first byte of str_arg, i.e.:

       _1 = str_arg; instrument (_1);  */
  gimple str_arg_ssa
    = gimple_build_assign_with_ops (NOP_EXPR,
                                    make_ssa_name (build_pointer_type
                                                   (char_type_node), NULL),
                                    str_arg, NULL);
  gimple_set_location (str_arg_ssa, loc);
  gimple_stmt_iterator gsi = *iter;
  gsi_insert_before (&gsi, str_arg_ssa, GSI_NEW_STMT);
  build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa), &gsi,
                    /*before_p=*/false, /*is_store=*/false, 1);

  /* If we initially had an instruction like:

         int n = strlen (str)

     we now want to instrument the access to str[n], after the
     instruction above.  */

  /* So let's build the access to str[n], that is, access through the
     pointer_plus expr: (_1 + len).  */
  gimple stmt
    = gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
                                    make_ssa_name (TREE_TYPE (str_arg),
                                                   NULL),
                                    gimple_assign_lhs (str_arg_ssa),
                                    len);
  gimple_set_location (stmt, loc);
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);

  build_check_stmt (loc, gimple_assign_lhs (stmt), &gsi,
                    /*before_p=*/false, /*is_store=*/false, 1);

  /* Ensure that iter points to the statement logically following the
     one it was initially pointing to.  */
  *iter = gsi;
  /* As *ITER has been advanced to point to the next statement, let's
     return true to inform transform_statements that it shouldn't
     advance *ITER anymore; otherwise it would skip that next
     statement, which wouldn't be instrumented.  */
  return true;
}

/* Instrument the call to a built-in memory access function that is
   pointed to by the iterator ITER.

   Upon completion, return TRUE iff *ITER has been advanced to the
   statement following the one it was originally pointing to.  */

static bool
instrument_builtin_call (gimple_stmt_iterator *iter)
{
  gimple call = gsi_stmt (*iter);

  gcc_checking_assert (is_gimple_builtin_call (call));

  tree callee = gimple_call_fndecl (call);
  location_t loc = gimple_location (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true;

  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      return instrument_strlen_call (iter);

      /* And now the __atomic* and __sync builtins.
         These are handled differently from the classical memory
         access builtins above.  */

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* Fall through.  */

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      {
        dest = gimple_call_arg (call, 0);
        /* DEST represents the address of a memory location.
           instrument_derefs wants the memory location, so let's
           dereference the address DEST before handing it to
           instrument_derefs.  */
        if (TREE_CODE (dest) == ADDR_EXPR)
          dest = TREE_OPERAND (dest, 0);
        else if (TREE_CODE (dest) == SSA_NAME)
          dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
                         dest, build_int_cst (TREE_TYPE (dest), 0));
        else
          gcc_unreachable ();

        instrument_derefs (iter, dest, loc, is_store);
        return false;
      }

    default:
      /* The other memory access builtins are not instrumented in this
         function because they either don't have any length parameter,
         or their length parameter is just a limit.  */
      break;
    }

  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
        instrument_mem_region_access (source0, len, iter,
                                      loc, /*is_store=*/false);
      if (source1 != NULL_TREE)
        instrument_mem_region_access (source1, len, iter,
                                      loc, /*is_store=*/false);
      else if (dest != NULL_TREE)
        instrument_mem_region_access (dest, len, iter,
                                      loc, /*is_store=*/true);

      *iter = gsi_for_stmt (call);
      return false;
    }
  return false;
}

/* Instrument the assignment statement ITER if it is subject to
   instrumentation.  */

static void
instrument_assignment (gimple_stmt_iterator *iter)
{
  gimple s = gsi_stmt (*iter);

  gcc_assert (gimple_assign_single_p (s));

  if (gimple_store_p (s))
    instrument_derefs (iter, gimple_assign_lhs (s),
                       gimple_location (s), true);
  if (gimple_assign_load_p (s))
    instrument_derefs (iter, gimple_assign_rhs1 (s),
                       gimple_location (s), false);
}

/* Instrument the function call pointed to by the iterator ITER, if it
   is subject to instrumentation.  At the moment, the only function
   calls that are instrumented are some built-in functions that access
   memory.  Look at instrument_builtin_call to learn more.

   Upon completion return TRUE iff *ITER was advanced to the statement
   following the one it was originally pointing to.  */

static bool
maybe_instrument_call (gimple_stmt_iterator *iter)
{
  gimple stmt = gsi_stmt (*iter);
  bool is_builtin = is_gimple_builtin_call (stmt);
  if (is_builtin
      && instrument_builtin_call (iter))
    return true;
  if (gimple_call_noreturn_p (stmt))
    {
      if (is_builtin)
        {
          tree callee = gimple_call_fndecl (stmt);
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_UNREACHABLE:
            case BUILT_IN_TRAP:
              /* Don't instrument these.  */
              return false;
            }
        }
      tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
      gimple g = gimple_build_call (decl, 0);
      gimple_set_location (g, gimple_location (stmt));
      gsi_insert_before (iter, g, GSI_SAME_STMT);
    }
  return false;
}

/* asan: this looks too complex.  Can this be done simpler?  */
/* Transform
   1) Memory references.
   2) BUILTIN_ALLOCA calls.  */

static void
transform_statements (void)
{
  basic_block bb;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block;

  FOR_EACH_BB (bb)
    {
      if (bb->index >= saved_last_basic_block)
        continue;
      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
        {
          gimple s = gsi_stmt (i);

          if (gimple_assign_single_p (s))
            instrument_assignment (&i);
          else if (is_gimple_call (s))
            {
              if (maybe_instrument_call (&i))
                /* Avoid gsi_next (&i), because maybe_instrument_call
                   advanced the I iterator already.  */
                continue;
            }
          gsi_next (&i);
        }
    }
}

/* Build
   struct __asan_global
   {
     const void *__beg;
     uptr __size;
     uptr __size_with_redzone;
     const void *__name;
     uptr __has_dynamic_init;
   } type.  */

static tree
asan_global_struct (void)
{
  static const char *field_names[5]
    = { "__beg", "__size", "__size_with_redzone",
        "__name", "__has_dynamic_init" };
  tree fields[5], ret;
  int i;

  ret = make_node (RECORD_TYPE);
  for (i = 0; i < 5; i++)
    {
      fields[i]
        = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
                      get_identifier (field_names[i]),
                      (i == 0 || i == 3) ? const_ptr_type_node
                      : build_nonstandard_integer_type (POINTER_SIZE, 1));
      DECL_CONTEXT (fields[i]) = ret;
      if (i)
        DECL_CHAIN (fields[i - 1]) = fields[i];
    }
  TYPE_FIELDS (ret) = fields[0];
  TYPE_NAME (ret) = get_identifier ("__asan_global");
  layout_type (ret);
  return ret;
}

/* Append the description of a single global DECL to vector V.
   TYPE is the __asan_global struct type as returned by asan_global_struct.  */

static void
asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
{
  tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  unsigned HOST_WIDE_INT size;
  tree str_cst, refdecl = decl;
  vec<constructor_elt, va_gc> *vinner = NULL;

  if (!asan_pp_initialized)
    asan_pp_initialize ();

  pp_clear_output_area (&asan_pp);
  if (DECL_NAME (decl))
    pp_base_tree_identifier (&asan_pp, DECL_NAME (decl));
  else
    pp_string (&asan_pp, "<unknown>");
  pp_space (&asan_pp);
  pp_left_paren (&asan_pp);
  pp_string (&asan_pp, main_input_filename);
  pp_right_paren (&asan_pp);
  str_cst = asan_pp_string ();

  if (asan_needs_local_alias (decl))
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
      refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
                            VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
      TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (refdecl) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
      DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
      DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
      DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
      TREE_STATIC (refdecl) = 1;
      TREE_PUBLIC (refdecl) = 0;
      TREE_USED (refdecl) = 1;
      assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
    }

  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
                          fold_convert (const_ptr_type_node,
                                        build_fold_addr_expr (refdecl)));
  size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  size += asan_red_zone_size (size);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
                          fold_convert (const_ptr_type_node, str_cst));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, 0));
  init = build_constructor (type, vinner);
  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
}

/* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */
void
initialize_sanitizer_builtins (void)
{
  tree decl;

  if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
    return;

  tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR
    = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTRMODE
    = build_function_type_list (void_type_node, ptr_type_node,
                                build_nonstandard_integer_type (POINTER_SIZE,
                                                                1), NULL_TREE);
  tree BT_FN_VOID_INT
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
  tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
  tree BT_FN_IX_CONST_VPTR_INT[5];
  tree BT_FN_IX_VPTR_IX_INT[5];
  tree BT_FN_VOID_VPTR_IX_INT[5];
  tree vptr
    = build_pointer_type (build_qualified_type (void_type_node,
                                                TYPE_QUAL_VOLATILE));
  tree cvptr
    = build_pointer_type (build_qualified_type (void_type_node,
                                                TYPE_QUAL_VOLATILE
                                                | TYPE_QUAL_CONST));
  tree boolt
    = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
  int i;
  for (i = 0; i < 5; i++)
    {
      tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
      BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
        = build_function_type_list (boolt, vptr, ptr_type_node, ix,
                                    integer_type_node, integer_type_node,
                                    NULL_TREE);
      BT_FN_IX_CONST_VPTR_INT[i]
        = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
      BT_FN_IX_VPTR_IX_INT[i]
        = build_function_type_list (ix, vptr, ix, integer_type_node,
                                    NULL_TREE);
      BT_FN_VOID_VPTR_IX_INT[i]
        = build_function_type_list (void_type_node, vptr, ix,
                                    integer_type_node, NULL_TREE);
    }
#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
#undef ATTR_NOTHROW_LEAF_LIST
#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
  ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef DEF_SANITIZER_BUILTIN
#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
  decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM,	\
                               BUILT_IN_NORMAL, NAME, NULL_TREE); \
  set_call_expr_flags (decl, ATTRS); \
  set_builtin_decl (ENUM, decl, true);

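/* As an illustration, for a sanitizer.def entry like
   (BUILT_IN_ASAN_INIT, "__asan_init", BT_FN_VOID,
   ATTR_NOTHROW_LEAF_LIST), the macro above expands to roughly:

     decl = add_builtin_function ("__builtin___asan_init", BT_FN_VOID,
                                  BUILT_IN_ASAN_INIT, BUILT_IN_NORMAL,
                                  "__asan_init", NULL_TREE);
     set_call_expr_flags (decl, ATTR_NOTHROW_LEAF_LIST);
     set_builtin_decl (BUILT_IN_ASAN_INIT, decl, true);  */
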
#include "sanitizer.def"

#undef DEF_SANITIZER_BUILTIN
}

/* Called via htab_traverse.  Count the number of emitted
   STRING_CSTs in the constant hash table.  */

static int
count_string_csts (void **slot, void *data)
{
  struct constant_descriptor_tree *desc
    = (struct constant_descriptor_tree *) *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    ++*((unsigned HOST_WIDE_INT *) data);
  return 1;
}

/* Helper structure to pass two parameters to
   add_string_csts.  */

struct asan_add_string_csts_data
{
  tree type;
  vec<constructor_elt, va_gc> *v;
};

/* Called via htab_traverse.  Call asan_add_global
   on emitted STRING_CSTs from the constant hash table.  */

static int
add_string_csts (void **slot, void *data)
{
  struct constant_descriptor_tree *desc
    = (struct constant_descriptor_tree *) *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    {
      struct asan_add_string_csts_data *aascd
        = (struct asan_add_string_csts_data *) data;
      asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
                       aascd->type, aascd->v);
    }
  return 1;
}

/* Needs to be GTY(()), because cgraph_build_static_cdtor may
   invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;

/* Module-level instrumentation.
   - Insert __asan_init() into the list of CTORs.
   - TODO: insert redzones around globals.  */

void
asan_finish_file (void)
{
  struct varpool_node *vnode;
  unsigned HOST_WIDE_INT gcount = 0;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* Avoid instrumenting code in the asan ctors/dtors.
     We don't need to insert padding after the description strings,
     nor after the .LASAN* array.  */
  flag_asan = 0;

  tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
  append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (TREE_ASM_WRITTEN (vnode->symbol.decl)
        && asan_protect_global (vnode->symbol.decl))
      ++gcount;
  htab_t const_desc_htab = constant_pool_htab ();
  htab_traverse (const_desc_htab, count_string_csts, &gcount);
  if (gcount)
    {
      tree type = asan_global_struct (), var, ctor;
      tree uptr = build_nonstandard_integer_type (POINTER_SIZE, 1);
      tree dtor_statements = NULL_TREE;
      vec<constructor_elt, va_gc> *v;
      char buf[20];

      type = build_array_type_nelts (type, gcount);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
      var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
                        type);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      vec_alloc (v, gcount);
      FOR_EACH_DEFINED_VARIABLE (vnode)
        if (TREE_ASM_WRITTEN (vnode->symbol.decl)
            && asan_protect_global (vnode->symbol.decl))
          asan_add_global (vnode->symbol.decl, TREE_TYPE (type), v);
      struct asan_add_string_csts_data aascd;
      aascd.type = TREE_TYPE (type);
      aascd.v = v;
      htab_traverse (const_desc_htab, add_string_csts, &aascd);
      ctor = build_constructor (type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_assemble_decl (varpool_node_for_decl (var));

      fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
                                                 build_fold_addr_expr (var),
                                                 build_int_cst (uptr, gcount)),
                                &asan_ctor_statements);

      fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
                                                 build_fold_addr_expr (var),
                                                 build_int_cst (uptr, gcount)),
                                &dtor_statements);
      cgraph_build_static_cdtor ('D', dtor_statements,
                                 MAX_RESERVED_INIT_PRIORITY - 1);
    }
  cgraph_build_static_cdtor ('I', asan_ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY - 1);
  flag_asan = 1;
}

/* Instrument the current function.  */

static unsigned int
asan_instrument (void)
{
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  transform_statements ();
  return 0;
}

static bool
gate_asan (void)
{
  return flag_asan != 0
         && !lookup_attribute ("no_address_safety_analysis",
                               DECL_ATTRIBUTES (current_function_decl));
}

struct gimple_opt_pass pass_asan =
{
 {
  GIMPLE_PASS,
  "asan",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  gate_asan,				/* gate */
  asan_instrument,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_flow | TODO_verify_stmts
  | TODO_update_ssa			/* todo_flags_finish */
 }
};

static bool
gate_asan_O0 (void)
{
  return !optimize && gate_asan ();
}

struct gimple_opt_pass pass_asan_O0 =
{
 {
  GIMPLE_PASS,
  "asan0",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  gate_asan_O0,				/* gate */
  asan_instrument,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_flow | TODO_verify_stmts
  | TODO_update_ssa			/* todo_flags_finish */
 }
};

#include "gt-asan.h"