gcc/tsan.c
/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2017 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-iterator.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-loop-ivopts.h"
#include "tree-eh.h"
#include "tsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "builtins.h"
#include "target.h"

/* Builds the following decl
   void __tsan_read/writeX (void *addr);  */

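/* Only power-of-two sizes up to 16 reach this function (instrument_expr
   routes everything else to __tsan_read_range/__tsan_write_range), so
   each branch below picks the matching entry point; e.g. a 4-byte
   access maps to __tsan_read4/__tsan_write4.  */
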
static tree
get_memory_access_decl (bool is_write, unsigned size)
{
  enum built_in_function fcode;

  if (size <= 1)
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
                     : BUILT_IN_TSAN_READ1;
  else if (size <= 3)
    fcode = is_write ? BUILT_IN_TSAN_WRITE2
                     : BUILT_IN_TSAN_READ2;
  else if (size <= 7)
    fcode = is_write ? BUILT_IN_TSAN_WRITE4
                     : BUILT_IN_TSAN_READ4;
  else if (size <= 15)
    fcode = is_write ? BUILT_IN_TSAN_WRITE8
                     : BUILT_IN_TSAN_READ8;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE16
                     : BUILT_IN_TSAN_READ16;

  return builtin_decl_implicit (fcode);
}

/* Check whether EXPR refers to a store to a vptr.  If so, return the
   stored value, otherwise return NULL.  */

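/* An illustrative case (GIMPLE for a C++ constructor; the names are
   compiler-generated): a statement like
     this->_vptr.C = &MEM[(void *)&_ZTV1C + 16B];
   is a COMPONENT_REF whose FIELD_DECL has DECL_VIRTUAL_P set, so such a
   store is later diverted to __tsan_vptr_update.  */
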
static tree
is_vptr_store (gimple *stmt, tree expr, bool is_write)
{
  if (is_write == true
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
          && DECL_VIRTUAL_P (field))
        return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}

/* Instruments EXPR if needed.  If any instrumentation is inserted,
   return true.  */

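/* A sketch of the effect (illustrative): given an escaping 'int x', the
   store 'x = 1;' gains a call immediately before it:

     __tsan_write4 (&x);
     x = 1;  */
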
static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple *stmt, *g;
  gimple_seq seq;
  location_t loc;
  unsigned int align;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size <= 0)
    return false;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  base = get_inner_reference (expr, &bitsize, &bitpos, &offset, &mode,
                              &unsignedp, &reversep, &volatilep);
  /* No need to instrument accesses to decls that don't escape:
     they cannot be reached by other threads.  */
  if (DECL_P (base) && !is_global_var (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      if (!pt_solution_includes (&pt, base))
        return false;
      if (!may_be_aliased (base))
        return false;
    }

  if (TREE_READONLY (base) || (VAR_P (base) && DECL_HARD_REGISTER (base)))
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);

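  /* Bit-field and BIT_FIELD_REF accesses are widened to the smallest
     byte-aligned region covering the accessed bits; the access is then
     instrumented through a char-typed MEM_REF at that byte offset.  */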
  if ((TREE_CODE (expr) == COMPONENT_REF
       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (expr, 1)))
      || TREE_CODE (expr) == BIT_FIELD_REF)
    {
      base = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) == COMPONENT_REF)
        {
          expr = TREE_OPERAND (expr, 1);
          if (is_write && DECL_BIT_FIELD_REPRESENTATIVE (expr))
            expr = DECL_BIT_FIELD_REPRESENTATIVE (expr);
          if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_SIZE (expr)))
            return false;
          bitpos = tree_to_uhwi (DECL_FIELD_OFFSET (expr)) * BITS_PER_UNIT
                   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (expr));
          bitsize = tree_to_uhwi (DECL_SIZE (expr));
        }
      else
        {
          if (!tree_fits_uhwi_p (TREE_OPERAND (expr, 2))
              || !tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
            return false;
          bitpos = tree_to_uhwi (TREE_OPERAND (expr, 2));
          bitsize = tree_to_uhwi (TREE_OPERAND (expr, 1));
        }
      if (bitpos < 0 || bitsize <= 0)
        return false;
      size = (bitpos % BITS_PER_UNIT + bitsize + BITS_PER_UNIT - 1)
             / BITS_PER_UNIT;
      if (may_be_nonaddressable_p (base))
        return false;
      align = get_object_alignment (base);
      if (align < BITS_PER_UNIT)
        return false;
      bitpos = bitpos & ~(BITS_PER_UNIT - 1);
      if ((align - 1) & bitpos)
        {
          align = (align - 1) & bitpos;
          align = least_bit_hwi (align);
        }
      expr = build_fold_addr_expr (unshare_expr (base));
      expr = build2 (MEM_REF, char_type_node, expr,
                     build_int_cst (TREE_TYPE (expr), bitpos / BITS_PER_UNIT));
      expr_ptr = build_fold_addr_expr (expr);
    }
  else
    {
      if (may_be_nonaddressable_p (expr))
        return false;
      align = get_object_alignment (expr);
      if (align < BITS_PER_UNIT)
        return false;
      expr_ptr = build_fold_addr_expr (unshare_expr (expr));
    }
  expr_ptr = force_gimple_operand (expr_ptr, &seq, true, NULL_TREE);
  if ((size & (size - 1)) != 0 || size > 16
      || align < MIN (size, 8) * BITS_PER_UNIT)
    {
      builtin_decl = builtin_decl_implicit (is_write
                                            ? BUILT_IN_TSAN_WRITE_RANGE
                                            : BUILT_IN_TSAN_READ_RANGE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, size_int (size));
    }
  else if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size),
                           1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, unshare_expr (rhs));
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call.  Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
         a basic block, so the instrumented stmts need to be
         inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
        {
          edge e;

          bb = gsi_bb (gsi);
          e = find_fallthru_edge (bb->succs);
          if (e)
            gsi_insert_seq_on_edge_immediate (e, seq);
        }
      else
        gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}

/* Actions for sync/atomic builtin transformations.  */
enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst,
  bool_clear, bool_test_and_set
};
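
/* Roughly, per the handling in instrument_builtin_call below: check_last
   verifies the trailing memory-model argument and just swaps the fndecl;
   add_seq_cst and add_acquire append an explicit memory model to a
   __sync_* call; fetch_op and fetch_op_seq_cst additionally recompute an
   OP-then-fetch result from the fetched value; the *_cas actions map the
   various compare-and-swap flavors; lock_release becomes a releasing
   store of 0; bool_clear and bool_test_and_set handle the bool-typed
   atomics.  */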

/* Table of how to map the sync/atomic builtins to their corresponding
   tsan equivalents.  */
static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)
#define BOOL_CLEAR(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_clear, ERROR_MARK)
#define BOOL_TEST_AND_SET(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_test_and_set, ERROR_MARK)

  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
              TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
              TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
              TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
              TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
            TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
            TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
            TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
            TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
            TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
           TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE),

  BOOL_CLEAR (ATOMIC_CLEAR, TSAN_ATOMIC8_STORE),

  BOOL_TEST_AND_SET (ATOMIC_TEST_AND_SET, TSAN_ATOMIC8_EXCHANGE)
};
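
/* For example (illustrative of the table-driven rewrite): __atomic_load_4
   (p, m) is a CHECK_LAST entry, so only its fndecl is replaced, yielding
   __tsan_atomic32_load (p, m); __sync_fetch_and_add_4 (p, v) is an
   ADD_SEQ_CST entry, so it also gains an explicit seq-cst memory-model
   argument, yielding __tsan_atomic32_fetch_add (p, v, MEMMODEL_SEQ_CST).  */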

/* Instrument an atomic builtin.  */

static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi), *g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
        tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
        if (decl == NULL_TREE)
          return;
        switch (tsan_atomic_table[i].action)
          {
          case check_last:
          case fetch_op:
            last_arg = gimple_call_arg (stmt, num - 1);
            if (tree_fits_uhwi_p (last_arg)
                && memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
              return;
            gimple_call_set_fndecl (stmt, decl);
            update_stmt (stmt);
            maybe_clean_eh_stmt (stmt);
            if (tsan_atomic_table[i].action == fetch_op)
              {
                args[1] = gimple_call_arg (stmt, 1);
                goto adjust_result;
              }
            return;
          case add_seq_cst:
          case add_acquire:
          case fetch_op_seq_cst:
            gcc_assert (num <= 2);
            for (j = 0; j < num; j++)
              args[j] = gimple_call_arg (stmt, j);
            for (; j < 2; j++)
              args[j] = NULL_TREE;
            args[num] = build_int_cst (NULL_TREE,
                                       tsan_atomic_table[i].action
                                       != add_acquire
                                       ? MEMMODEL_SEQ_CST
                                       : MEMMODEL_ACQUIRE);
            update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
            maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
            stmt = gsi_stmt (*gsi);
            if (tsan_atomic_table[i].action == fetch_op_seq_cst)
              {
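              /* The tsan runtime only provides fetch-and-OP entry points,
                 so the OP-and-fetch builtins were redirected to them
                 above; recompute the "OP then fetch" result from the
                 returned previous value, e.g. for __atomic_add_fetch the
                 final value is prev + arg.  */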
              adjust_result:
                lhs = gimple_call_lhs (stmt);
                if (lhs == NULL_TREE)
                  return;
                if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                TREE_TYPE (args[1])))
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (var, NOP_EXPR, args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    args[1] = var;
                  }
                gimple_call_set_lhs (stmt, make_ssa_name (TREE_TYPE (lhs)));
                /* BIT_NOT_EXPR stands for NAND.  */
                if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (var, BIT_AND_EXPR,
                                             gimple_call_lhs (stmt), args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    g = gimple_build_assign (lhs, BIT_NOT_EXPR, var);
                  }
                else
                  g = gimple_build_assign (lhs, tsan_atomic_table[i].code,
                                           gimple_call_lhs (stmt), args[1]);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
          case weak_cas:
            if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
              continue;
            /* FALLTHRU */
          case strong_cas:
            gcc_assert (num == 6);
            for (j = 0; j < 6; j++)
              args[j] = gimple_call_arg (stmt, j);
            if (tree_fits_uhwi_p (args[4])
                && memmodel_base (tree_to_uhwi (args[4])) >= MEMMODEL_LAST)
              return;
            if (tree_fits_uhwi_p (args[5])
                && memmodel_base (tree_to_uhwi (args[5])) >= MEMMODEL_LAST)
              return;
            update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
                                args[4], args[5]);
            maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
            return;
          case bool_cas:
          case val_cas:
            gcc_assert (num == 3);
            for (j = 0; j < 3; j++)
              args[j] = gimple_call_arg (stmt, j);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
            t = create_tmp_var (t);
            mark_addressable (t);
            if (!useless_type_conversion_p (TREE_TYPE (t),
                                            TREE_TYPE (args[1])))
              {
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)),
                                         NOP_EXPR, args[1]);
                gsi_insert_before (gsi, g, GSI_SAME_STMT);
                args[1] = gimple_assign_lhs (g);
              }
            g = gimple_build_assign (t, args[1]);
            gsi_insert_before (gsi, g, GSI_SAME_STMT);
            lhs = gimple_call_lhs (stmt);
            update_gimple_call (gsi, decl, 5, args[0],
                                build_fold_addr_expr (t), args[2],
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST));
            maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
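            /* __sync_val_compare_and_swap returns the old value, while
               the tsan CAS returns a success flag and writes the old
               value back through its second argument; reconstruct the
               old value as: success ? expected : reloaded temporary.  */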
            if (tsan_atomic_table[i].action == val_cas && lhs)
              {
                tree cond;
                stmt = gsi_stmt (*gsi);
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
                t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
                cond = build2 (NE_EXPR, boolean_type_node, t,
                               build_int_cst (TREE_TYPE (t), 0));
                g = gimple_build_assign (lhs, COND_EXPR, cond, args[1],
                                         gimple_assign_lhs (g));
                gimple_call_set_lhs (stmt, t);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
          case lock_release:
            gcc_assert (num == 1);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (t));
            update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                build_int_cst (t, 0),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_RELEASE));
            maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
            return;
          case bool_clear:
          case bool_test_and_set:
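            /* These builtins act on bool; if BOOL_TYPE_SIZE is wider than
               8 bits, step from the 8-bit tsan builtin to the one of
               matching width, relying on the TSAN_ATOMIC{8,16,32,64,128}_*
               builtin codes being consecutive.  */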
            if (BOOL_TYPE_SIZE != 8)
              {
                decl = NULL_TREE;
                for (j = 1; j < 5; j++)
                  if (BOOL_TYPE_SIZE == (8 << j))
                    {
                      enum built_in_function tsan_fcode
                        = (enum built_in_function)
                          (tsan_atomic_table[i].tsan_fcode + j);
                      decl = builtin_decl_implicit (tsan_fcode);
                      break;
                    }
                if (decl == NULL_TREE)
                  return;
              }
            last_arg = gimple_call_arg (stmt, num - 1);
            if (tree_fits_uhwi_p (last_arg)
                && memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
              return;
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (t));
            if (tsan_atomic_table[i].action == bool_clear)
              {
                update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                    build_int_cst (t, 0), last_arg);
                maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
                return;
              }
            t = build_int_cst (t, targetm.atomic_test_and_set_trueval);
            update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                t, last_arg);
            maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
            stmt = gsi_stmt (*gsi);
            lhs = gimple_call_lhs (stmt);
            if (lhs == NULL_TREE)
              return;
            if (targetm.atomic_test_and_set_trueval != 1
                || !useless_type_conversion_p (TREE_TYPE (lhs),
                                               TREE_TYPE (t)))
              {
                tree new_lhs = make_ssa_name (TREE_TYPE (t));
                gimple_call_set_lhs (stmt, new_lhs);
                if (targetm.atomic_test_and_set_trueval != 1)
                  g = gimple_build_assign (lhs, NE_EXPR, new_lhs,
                                           build_int_cst (TREE_TYPE (t), 0));
                else
                  g = gimple_build_assign (lhs, NOP_EXPR, new_lhs);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
                update_stmt (stmt);
              }
            return;
          default:
            continue;
          }
      }
}

/* Instruments the gimple pointed to by GSI.  Return
   true if func entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple *stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
          != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      /* Any function that contains a call will have its exit
         instrumented, so no call other than __tsan_func_exit may
         remain a tail call: the exit instrumentation has to run
         after it.  Clear the tail-call flag.  */
      gimple_call_set_tail (as_a <gcall *> (stmt), false);
      if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
        instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
           && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
        {
          lhs = gimple_assign_lhs (stmt);
          instrumented = instrument_expr (*gsi, lhs, true);
        }
      if (gimple_assign_load_p (stmt))
        {
          rhs = gimple_assign_rhs1 (stmt);
          instrumented = instrument_expr (*gsi, rhs, false);
        }
    }
  return instrumented;
}

/* Replace TSAN_FUNC_EXIT internal call with function exit tsan builtin.  */

static void
replace_func_exit (gimple *stmt)
{
  tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
  gimple *g = gimple_build_call (builtin_decl, 0);
  gimple_set_location (g, cfun->function_end_locus);
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_replace (&gsi, g, true);
}

/* Instrument function exit.  Used when TSAN_FUNC_EXIT does not exist.  */

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple *stmt, *g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
                  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}

/* Instruments all interesting memory accesses in the current function.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (bool *cfg_changed)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;
  bool func_exit_seen = false;
  auto_vec<gimple *> tsan_func_exits;

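  /* TSAN_FUNC_EXIT internal calls seen before we know whether entry/exit
     instrumentation is wanted are queued in TSAN_FUNC_EXITS; afterwards
     they are either turned into real __tsan_func_exit calls or removed.  */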
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          if (gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
            {
              if (fentry_exit_instrument)
                replace_func_exit (stmt);
              else
                tsan_func_exits.safe_push (stmt);
              func_exit_seen = true;
            }
          else
            fentry_exit_instrument |= instrument_gimple (&gsi);
        }
      if (gimple_purge_dead_eh_edges (bb))
        *cfg_changed = true;
    }
  unsigned int i;
  gimple *stmt;
  FOR_EACH_VEC_ELT (tsan_func_exits, i, stmt)
    if (fentry_exit_instrument)
      replace_func_exit (stmt);
    else
      {
        gsi = gsi_for_stmt (stmt);
        gsi_remove (&gsi, true);
      }
  if (fentry_exit_instrument && !func_exit_seen)
    instrument_func_exit ();
  return fentry_exit_instrument;
}

/* Instruments function entry.  */

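/* The emitted sequence, inserted on the single edge leaving the entry
   block, is in GIMPLE roughly (the SSA name is compiler-generated):

     ret_addr_N = __builtin_return_address (0);
     __tsan_func_entry (ret_addr_N);  */
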
static void
instrument_func_entry (void)
{
  tree ret_addr, builtin_decl;
  gimple *g;
  gimple_seq seq = NULL;

  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_seq_on_edge_immediate (e, seq);
}

/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  bool cfg_changed = false;
  if (instrument_memory_accesses (&cfg_changed))
    instrument_func_entry ();
  return cfg_changed ? TODO_cleanup_cfg : 0;
}

/* Inserts __tsan_init () into the list of CTORs.  */

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
                            &ctor_statements);
  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY - 1);
}

/* The pass descriptor.  */

namespace {

const pass_data pass_data_tsan =
{
  GIMPLE_PASS, /* type */
  "tsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan : public gimple_opt_pass
{
public:
  pass_tsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tsan (m_ctxt); }
  virtual bool gate (function *)
    {
      return sanitize_flags_p (SANITIZE_THREAD);
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan

} // anon namespace

gimple_opt_pass *
make_pass_tsan (gcc::context *ctxt)
{
  return new pass_tsan (ctxt);
}

namespace {

const pass_data pass_data_tsan_O0 =
{
  GIMPLE_PASS, /* type */
  "tsan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan_O0 : public gimple_opt_pass
{
public:
  pass_tsan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return (sanitize_flags_p (SANITIZE_THREAD) && !optimize);
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan_O0

} // anon namespace

gimple_opt_pass *
make_pass_tsan_O0 (gcc::context *ctxt)
{
  return new pass_tsan_O0 (ctxt);
}